diff --git a/.github/workflows/sync_activity.yml b/.github/workflows/sync_activity.yml new file mode 100644 index 000000000..54d80ee76 --- /dev/null +++ b/.github/workflows/sync_activity.yml @@ -0,0 +1,59 @@ +name: sync_game_activity + +on: + schedule: + - cron: '30 20 * * *' + workflow_dispatch: + +permissions: + contents: write + pull-requests: write + + +jobs: + sync-activities: + runs-on: ubuntu-latest + steps: + - name: Checkout activities repository + uses: actions/checkout@v4 + with: + sparse-checkout: | + module/activities/ + - name: Set up Python 3.9 + uses: actions/setup-python@v5 + with: + python-version: '3.9' + - name: Install requirements + run: | + python -m pip install --upgrade pip + pip install -r module/activities/requirements-activity-updater.txt + - name: Run activity updater + run: python3 module/activities/activity_updater.py + env: + GITHUB_REPOSITORY_OWNER: ${{ github.repository_owner }} + - name: Upload update log as artifact + uses: actions/upload-artifact@v4 + with: + name: activity-update-log + path: module/activities/activity_update_log.md + - name: Copy updated activity.json if changed + if: success() && (hashFiles('module/activities/tmp_activity.json') != '') + # Only upload and create PR if any new updates to the existing activity data + run: cp module/activities/tmp_activity.json module/activities/activity.json + - name: Set the output log as PR body + if: success() && (hashFiles('module/activities/tmp_activity.json') != '') + id: pr_body + run: | + echo "body<<EOF" >> $GITHUB_OUTPUT + cat module/activities/activity_update_log.md >> $GITHUB_OUTPUT + echo "EOF" >> $GITHUB_OUTPUT + - name: Create Pull Request if updated + if: success() && (hashFiles('module/activities/tmp_activity.json') != '') + uses: peter-evans/create-pull-request@v7 + with: + commit-message: "chore(activity): sync activity.json (auto-update by workflow)" + title: "CI/sync activity.json (auto-update by workflow)" + body: "${{ steps.pr_body.outputs.body }}" + branch: 
"auto-sync-activity" + reviewers: | + ${{ github.repository_owner }} diff --git a/core/Baas_thread.py b/core/Baas_thread.py index 32078853b..7e90c2ee2 100644 --- a/core/Baas_thread.py +++ b/core/Baas_thread.py @@ -12,7 +12,6 @@ import cv2 import numpy as np import psutil -import requests import module.ExploreTasks.explore_task from core import position, picture, utils @@ -22,7 +21,6 @@ from core.device.Screenshot import Screenshot from core.device.connection import Connection from core.device.emulator_manager import process_api -from core.device.uiautomator2_client import BAAS_U2_Initer, __atx_agent_version__ from core.device.uiautomator2_client import U2Client from core.exception import RequestHumanTakeOver, FunctionCallTimeout, PackageIncorrect, LogTraceback from core.notification import notify, toast @@ -378,27 +376,6 @@ def _read_DeviceOption_ocr_language(self, _p): else: raise Exception("Global Server Invalid Language : " + game_lan + ".") - def check_atx(self): - self.logger.info("--------------Check ATX install ----------------") - _d = self.u2._wait_for_device() - if not _d: - raise RuntimeError("USB device %s is offline " + self.serial) - self.logger.info("Device [ " + self.serial + " ] is online.") - - version_url = self.u2.path2url("/version") - try: - version = requests.get(version_url, timeout=3).text - if version != __atx_agent_version__: - raise EnvironmentError("atx-agent need upgrade") - except (requests.RequestException, EnvironmentError): - self.set_up_atx_agent() - self.wait_uiautomator_start() - self.logger.info("Uiautomator2 service started.") - - def set_up_atx_agent(self): - init = BAAS_U2_Initer(self.u2._adb_device, self.logger) - init.install() - def send(self, msg, task=None): if msg == "start": if self.button_signal is not None: @@ -863,8 +840,8 @@ def daily_config_refresh(self): last_refresh_hour = last_refresh.hour daily_reset = 4 - (self.server == 'JP' or self.server == 'Global') if now.day == last_refresh.day and now.year == 
last_refresh.year and now.month == last_refresh.month and \ - ((hour < daily_reset and last_refresh_hour < daily_reset) or ( - hour >= daily_reset and last_refresh_hour >= daily_reset)): + ((hour < daily_reset and last_refresh_hour < daily_reset) or ( + hour >= daily_reset and last_refresh_hour >= daily_reset)): return else: self.config.last_refresh_config_time = time.time() @@ -981,9 +958,10 @@ def handle_resolution_dynamic_change(self): _new = self.connection.app_process_window.get_resolution() if self.resolution[0] == _new[0] and self.resolution[1] == _new[1]: return - self.logger.warning("Screen Resolution change detected, we don't recommend you to change screen resolution while running the script.") + self.logger.warning( + "Screen Resolution change detected, we don't recommend you to change screen resolution while running the script.") - _new = self._wait_resolution_change_finish(_new,10, 0.3) + _new = self._wait_resolution_change_finish(_new, 10, 0.3) if self.resolution[0] == _new[0] and self.resolution[1] == _new[1]: self.logger.info("Resolution unchanged.") return @@ -1020,7 +998,6 @@ def _wait_resolution_change_finish(self, last_res, static_cnt=10, interval=0.3): latest_res = _new time.sleep(interval) - @staticmethod def _accept_resolution(x, y, std_x=16, std_y=9, threshold=0.05): return abs(x / y - std_x / std_y) <= threshold @@ -1030,7 +1007,8 @@ def check_screen_ratio(self, width, height): screen_ratio = width // gcd, height // gcd if screen_ratio == (16, 9): return - self.logger.warning(f"Screen Ratio: {width}:{height} is not a precise 16:9 screen, we recommend you to use a precise 16:9 screen.") + self.logger.warning( + f"Screen Ratio: {width}:{height} is not a precise 16:9 screen, we recommend you to use a precise 16:9 screen.") if self._accept_resolution(width, height, 16, 9, 0.05): self.logger.info(f"Screen Ratio close to 16:9. 
Accept it.") return @@ -1040,15 +1018,13 @@ def check_screen_ratio(self, width, height): def _get_android_device_resolution(self): self.u2_client = U2Client.get_instance(self.serial) self.u2 = self.u2_client.get_connection() - self.check_atx() self.last_refresh_u2_time = time.time() return self.resolution_uiautomator2() def resolution_uiautomator2(self): for i in range(0, 3): try: - info = self.u2.http.get('/info').json() - w, h = info['display']['width'], info['display']['height'] + w, h = self.u2.info['displayWidth'], self.u2.info['displayHeight'] if w < h: w, h = h, w return w, h @@ -1060,17 +1036,3 @@ def main_page_update_data(self): self.get_ap(True) self.get_creditpoints(True) self.get_pyroxene(True) - - -if __name__ == '__main__': - print(os.path.exists( - "D:\\github\\bass\\blue_archive_auto_script\\src\\atx_app\\atx-agent_0.10.1_linux_386\\atx-agent")) - # "D:\\github\\bass\\blue_archive_auto_script\\src\\atx_app\\atx-agent_0.10.0_linux_386\\atx-agent" - import uiautomator2 - - u2 = uiautomator2.connect("127.0.0.1:16512") - from core.utils import Logger - - logger = Logger(None) - init = BAAS_U2_Initer(u2._adb_device, logger) - init.uninstall() diff --git a/core/device/screenshot/uiautomator2.py b/core/device/screenshot/uiautomator2.py index 599c53594..35e88ef3c 100644 --- a/core/device/screenshot/uiautomator2.py +++ b/core/device/screenshot/uiautomator2.py @@ -9,6 +9,8 @@ def __init__(self, conn): def screenshot(self): for i in range(5): try: - return self.u2.screenshot() + screenshot = self.u2.screenshot() + if screenshot is not None: + return screenshot except Exception as e: print(e) diff --git a/core/device/uiautomator2_client.py b/core/device/uiautomator2_client.py index 02761bdd8..039f798c3 100644 --- a/core/device/uiautomator2_client.py +++ b/core/device/uiautomator2_client.py @@ -1,17 +1,8 @@ -import datetime -from retry import retry -import adbutils -import uiautomator2 as u2 +import base64 + import cv2 import numpy as np -import requests -from 
uiautomator2.version import (__apk_version__, __atx_agent_version__, __version__) -import os -import json - -appdir = os.path.join(os.path.expanduser("~"), '.uiautomator2') - -GITHUB_BASEURL = "https://github.com/openatx" +import uiautomator2 as u2 class U2Client: @@ -42,202 +33,17 @@ def swipe(self, x1, y1, x2, y2, duration): self.connection.swipe(x1, y1, x2, y2, duration) def screenshot(self): - return cv2.cvtColor(np.array(self.connection.screenshot()), cv2.COLOR_RGB2BGR) + # copied and modified from uiautomator2 source code + # original version do not support unadjusted screenshot + base64_data = self.connection.jsonrpc.takeScreenshot(1, 100) + # takeScreenshot may return None + if base64_data: + jpg_raw = base64.b64decode(base64_data) + img_array = np.frombuffer(jpg_raw, dtype=np.uint8) + img = cv2.imdecode(img_array, cv2.IMREAD_COLOR) + return img + else: + return None def get_connection(self): return self.connection - - - - -class BAAS_U2_Initer: - """ - Class to initialize uiautomator2 by following local files - src/atx_app - │ - ├── app-uiautomator.apk - ├── app-uiautomator-test.apk - └── atx-agent(in different archs) - - If it's your first time to use uiautomator2 (or start baas), it will initialize by downloading files from github. 
- It will cause following error if you can't connect to github (mainly by cn users): - - HTTPSConnectionPool(host='github.com', port=443): - Max retries exceeded with url:/openatx/atx-agent/releases/download/0.10.0/atx-agent_0.10.0_linux_386_tar_gz - (Caused by NewconnectionError(': failed to establish a new connection:[winError 10061]由于目标计算机积极拒绝,无法连接。)) - - """ - - def __init__(self, device: adbutils.AdbDevice, logger): - d = self._device = device - self.sdk = d.getprop('ro.build.version.sdk') - self.abi = d.getprop('ro.product.cpu.abi') - self.pre = d.getprop('ro.build.version.preview_sdk') - self.arch = d.getprop('ro.arch') - self.abis = (d.getprop('ro.product.cpu.abilist').strip() or self.abi).split(",") - - self.__atx_listen_addr = "127.0.0.1:7912" - self.logger = logger - # self.logger.debug("Initial device %s", device) - self.logger.info("uiautomator2 version: [ " + __version__ + " ].") - - @property - def atx_agent_path(self): - return "/data/local/tmp/atx-agent" - - def shell(self, *args, timeout=60): - return self._device.shell(args, timeout=timeout) - - @property - def local_atx_agent_path(self): - """ - Returns: - str: local atx-agent path according to device abi - """ - files = { - 'armeabi-v7a': 'atx-agent_{v}_linux_armv7/atx-agent', - 'arm64-v8a': 'atx-agent_{v}_linux_arm64/atx-agent', - 'armeabi': 'atx-agent_{v}_linux_armv6/atx-agent', - 'x86': 'atx-agent_{v}_linux_386/atx-agent', - 'x86_64': 'atx-agent_{v}_linux_386/atx-agent', - } - name = None - for abi in self.abis: - name = files.get(abi) - if name: - break - if not name: - raise Exception( - "arch(%s) need to be supported yet, please report an issue in github" - % self.abis) - return os.path.abspath("src/atx_app/%s" % name.format(v=__atx_agent_version__)) - - def is_apk_outdated(self): - """ - If apk signature mismatch, the uiautomator test will fail to start - command: am instrument -w -r -e debug false \ - -e class com.github.uiautomator.stub.Stub \ - 
com.github.uiautomator.test/android.support.test.runner.AndroidJUnitRunner - java.lang.SecurityException: Permission Denial: \ - starting instrumentation ComponentInfo{com.github.uiautomator.test/android.support.test.runner.AndroidJUnitRunner} \ - from pid=7877, uid=7877 not allowed \ - because package com.github.uiautomator.test does not have a signature matching the target com.github.uiautomator - """ - apk_debug = self._device.package_info("com.github.uiautomator") - apk_debug_test = self._device.package_info("com.github.uiautomator.test") - self.logger.info("apk-debug package-info: [ " + str(apk_debug) + " ].") - self.logger.info("apk-debug-test package-info: [ " + str(apk_debug_test) + " ].") - if not apk_debug or not apk_debug_test: - return True - if apk_debug['version_name'] != __apk_version__: - self.logger.info( - "package com.github.uiautomator version [ " + apk_debug[ - 'version_name'] + " ] latest [ " + __apk_version__ + " ].") - return True - - if apk_debug['signature'] != apk_debug_test['signature']: - # On vivo-Y67 signature might not same, but signature matched. 
- # So here need to check first_install_time again - max_delta = datetime.timedelta(minutes=3) - if abs(apk_debug['first_install_time'] - - apk_debug_test['first_install_time']) > max_delta: - self.logger.info( - "package com.github.uiautomator does not have a signature matching the target com.github.uiautomator" - ) - return True - return False - - def is_atx_agent_outdated(self): - """ - Returns: - bool - """ - agent_version = self._device.shell([self.atx_agent_path, "version"]).strip() - if agent_version == "dev": - self.logger.info("skip version check for atx-agent dev") - return False - - # semver major.minor.patch - try: - real_ver = list(map(int, agent_version.split("."))) - want_ver = list(map(int, __atx_agent_version__.split("."))) - except ValueError: - return True - - self.logger.info("Real version: " + str(real_ver) + ", Expect version:" + str(want_ver) + ".") - - if real_ver[:2] != want_ver[:2]: - return True - - return real_ver[2] < want_ver[2] - - def _install_uiautomator_apks(self): - """ use uiautomator 2.0 to run uiautomator test - 通常在连接USB数据线的情况下调用 - """ - self.shell("pm", "uninstall", "com.github.uiautomator") - self.shell("pm", "uninstall", "com.github.uiautomator.test") - for filename, url in app_uiautomator_apk_local_path(): - path = self.push_url(url, mode=0o644) - self.shell("pm", "install", "-r", "-t", path) - self.logger.info("- " + filename + " installed.") - - def push_url(self, path, dest=None, mode=0o755): - if not dest: - dest = self.atx_agent_path - - self.logger.info("Push to + " + dest + " . 
") - self._device.sync.push(path, dest, mode=mode) - return dest - - def setup_atx_agent(self): - # stop atx-agent first - self.logger.info("Stop atx-agent.") - self.shell(self.atx_agent_path, "server", "--stop") - if self.is_atx_agent_outdated(): - self.logger.info("Install atx-agent [ " + __atx_agent_version__ + " ].") - self.push_url(self.local_atx_agent_path) - - self.logger.info("Start atx-agent.") - self.shell(self.atx_agent_path, 'server', '--nouia', '-d', "--addr", self.__atx_listen_addr) - self.logger.info("Check atx-agent version") - self.check_atx_agent_version() - - @retry((requests.ConnectionError, requests.ReadTimeout, requests.HTTPError), delay=.5, tries=10) - def check_atx_agent_version(self): - port = self._device.forward_port(7912) - self.logger.info("Forward: local:tcp:" + str(port) + " -> remote:tcp:7912") - version = requests.get("http://%s:%d/version" % (self._device._client.host, port)).text.strip() - self.logger.info("atx-agent version [ " + version + " ].") - - wlan_ip = requests.get("http://%s:%d/wlan/ip" % (self._device._client.host, port)).text.strip() - self.logger.info("device wlan ip: [ " + wlan_ip + " ].") - return version - - def install(self): - if self.is_apk_outdated(): - self.logger.info( - "Install com.github.uiautomator, com.github.uiautomator.test + [ " + __apk_version__ + " ].") - self._install_uiautomator_apks() - else: - self.logger.info("Already installed com.github.uiautomator apks") - self.setup_atx_agent() - - def uninstall(self): - self._device.shell([self.atx_agent_path, "server", "--stop"]) - self._device.shell(["rm", self.atx_agent_path]) - self.logger.info("atx-agent stopped and removed") - self._device.shell(["pm", "uninstall", "com.github.uiautomator"]) - self._device.shell(["pm", "uninstall", "com.github.uiautomator.test"]) - self.logger.info("com.github.uiautomator uninstalled.") - - -def app_uiautomator_apk_local_path(): - """ - Returns: - List[Tuple[str, str]]: [(filename, local_path)] - """ - ret = [] - for 
name in ["app-uiautomator.apk", "app-uiautomator-test.apk"]: - ret.append((name, "src/atx_app/" + name)) - return ret diff --git a/module/activities/.gitignore b/module/activities/.gitignore new file mode 100644 index 000000000..c5865f8fc --- /dev/null +++ b/module/activities/.gitignore @@ -0,0 +1,2 @@ +activity_update_log.md +tmp_activity.json diff --git a/module/activities/__init__.py b/module/activities/__init__.py index 43e827968..d4bb0c613 100644 --- a/module/activities/__init__.py +++ b/module/activities/__init__.py @@ -1,20 +1,9 @@ -from module.activities import AbydosResortRestorationCommittee -from module.activities import BaskingInTheBrillianceOfTheirSerenade -from module.activities import Battle_Before_the_New_Years_Dinner_Let_Us_Play_For_The_Victory -from module.activities import FromOpera0068WithLove -from module.activities import NewYearsAperitifOneAndDoneMatch -from module.activities import PresidentHinasSummerVacation -from module.activities import SummerSkysWishes -from module.activities import SummerSpecialOperationsRABBITPlatoonAndTheMysteryOfTheMissingShrimp -from module.activities import SweetSecretsAndGunfightsATaleOfAfterSchoolSweets -from module.activities import TheCathedralsMerryChristmas -from module.activities import anUnconcealedHeart -from module.activities import bunnyChaserOnTheShip -from module.activities import iveAlive -from module.activities import livelyAndJoyfulWalkingTour -from module.activities import no_227_kinosaki_spa -from module.activities import no_68_spring_wild_dream -from module.activities import pleasant_Valentines_Day_in_schale -from module.activities import reckless_nun_and_the_witch_in_the_old_library -from module.activities import revolutionKupalaNight -from module.activities import sakura_flowing_chaos_in_the_gala +from module.activities.activitiy_data import TheCathedralsMerryChristmas, PresidentHinasSummerVacation, \ + reckless_nun_and_the_witch_in_the_old_library, anUnconcealedHeart, bunnyChaserOnTheShip, \ 
+ AbydosResortRestorationCommittee, NewYearsAperitifOneAndDoneMatch, revolutionKupalaNight, \ + SweetSecretsAndGunfightsATaleOfAfterSchoolSweets, SummerSkysWishes, sakura_flowing_chaos_in_the_gala, \ + Battle_Before_the_New_Years_Dinner_Let_Us_Play_For_The_Victory, BaskingInTheBrillianceOfTheirSerenade, \ + no_68_spring_wild_dream, iveAlive, livelyAndJoyfulWalkingTour, FromOpera0068WithLove, \ + SummerSpecialOperationsRABBITPlatoonAndTheMysteryOfTheMissingShrimp, no_227_kinosaki_spa, \ + pleasant_Valentines_Day_in_schale +from module.activities import activity_updater diff --git a/module/activities/ALittleBeforetheFoodPiledUpTheBestGame.py b/module/activities/activitiy_data/ALittleBeforetheFoodPiledUpTheBestGame.py similarity index 100% rename from module/activities/ALittleBeforetheFoodPiledUpTheBestGame.py rename to module/activities/activitiy_data/ALittleBeforetheFoodPiledUpTheBestGame.py diff --git a/module/activities/AbydosResortRestorationCommittee.py b/module/activities/activitiy_data/AbydosResortRestorationCommittee.py similarity index 100% rename from module/activities/AbydosResortRestorationCommittee.py rename to module/activities/activitiy_data/AbydosResortRestorationCommittee.py diff --git a/module/activities/BaskingInTheBrillianceOfTheirSerenade.py b/module/activities/activitiy_data/BaskingInTheBrillianceOfTheirSerenade.py similarity index 100% rename from module/activities/BaskingInTheBrillianceOfTheirSerenade.py rename to module/activities/activitiy_data/BaskingInTheBrillianceOfTheirSerenade.py diff --git a/module/activities/Battle_Before_the_New_Years_Dinner_Let_Us_Play_For_The_Victory.py b/module/activities/activitiy_data/Battle_Before_the_New_Years_Dinner_Let_Us_Play_For_The_Victory.py similarity index 100% rename from module/activities/Battle_Before_the_New_Years_Dinner_Let_Us_Play_For_The_Victory.py rename to module/activities/activitiy_data/Battle_Before_the_New_Years_Dinner_Let_Us_Play_For_The_Victory.py diff --git 
a/module/activities/CodeBox.py b/module/activities/activitiy_data/CodeBox.py similarity index 100% rename from module/activities/CodeBox.py rename to module/activities/activitiy_data/CodeBox.py diff --git a/module/activities/D_U_ShiratoriWardRestorationWork.py b/module/activities/activitiy_data/D_U_ShiratoriWardRestorationWork.py similarity index 100% rename from module/activities/D_U_ShiratoriWardRestorationWork.py rename to module/activities/activitiy_data/D_U_ShiratoriWardRestorationWork.py diff --git a/module/activities/ElectronicNewYearsMarch.py b/module/activities/activitiy_data/ElectronicNewYearsMarch.py similarity index 100% rename from module/activities/ElectronicNewYearsMarch.py rename to module/activities/activitiy_data/ElectronicNewYearsMarch.py diff --git a/module/activities/FromOpera0068WithLove.py b/module/activities/activitiy_data/FromOpera0068WithLove.py similarity index 100% rename from module/activities/FromOpera0068WithLove.py rename to module/activities/activitiy_data/FromOpera0068WithLove.py diff --git a/module/activities/GakumanDoujinshiTheFinalEpisodeSoughtByTwoPeople.py b/module/activities/activitiy_data/GakumanDoujinshiTheFinalEpisodeSoughtByTwoPeople.py similarity index 100% rename from module/activities/GakumanDoujinshiTheFinalEpisodeSoughtByTwoPeople.py rename to module/activities/activitiy_data/GakumanDoujinshiTheFinalEpisodeSoughtByTwoPeople.py diff --git a/module/activities/GetSetGoKivotosHaloGames.py b/module/activities/activitiy_data/GetSetGoKivotosHaloGames.py similarity index 100% rename from module/activities/GetSetGoKivotosHaloGames.py rename to module/activities/activitiy_data/GetSetGoKivotosHaloGames.py diff --git a/module/activities/InSearchOfAHiddenHeritageTrinity'sExtracurricularActivities.py b/module/activities/activitiy_data/InSearchOfAHiddenHeritageTrinity'sExtracurricularActivities.py similarity index 100% rename from module/activities/InSearchOfAHiddenHeritageTrinity'sExtracurricularActivities.py rename to 
module/activities/activitiy_data/InSearchOfAHiddenHeritageTrinity'sExtracurricularActivities.py diff --git a/module/activities/JP_2025_03_26.py b/module/activities/activitiy_data/JP_2025_03_26.py similarity index 100% rename from module/activities/JP_2025_03_26.py rename to module/activities/activitiy_data/JP_2025_03_26.py diff --git a/module/activities/JP_2025_04_22.py b/module/activities/activitiy_data/JP_2025_04_22.py similarity index 100% rename from module/activities/JP_2025_04_22.py rename to module/activities/activitiy_data/JP_2025_04_22.py diff --git a/module/activities/JP_2025_06_25.py b/module/activities/activitiy_data/JP_2025_06_25.py similarity index 100% rename from module/activities/JP_2025_06_25.py rename to module/activities/activitiy_data/JP_2025_06_25.py diff --git a/module/activities/JP_2025_07_22.py b/module/activities/activitiy_data/JP_2025_07_22.py similarity index 100% rename from module/activities/JP_2025_07_22.py rename to module/activities/activitiy_data/JP_2025_07_22.py diff --git a/module/activities/JP_2025_08_20.py b/module/activities/activitiy_data/JP_2025_08_20.py similarity index 100% rename from module/activities/JP_2025_08_20.py rename to module/activities/activitiy_data/JP_2025_08_20.py diff --git a/module/activities/LivelyandBusily.py b/module/activities/activitiy_data/LivelyandBusily.py similarity index 100% rename from module/activities/LivelyandBusily.py rename to module/activities/activitiy_data/LivelyandBusily.py diff --git a/module/activities/Moonlight Dream.py b/module/activities/activitiy_data/Moonlight Dream.py similarity index 100% rename from module/activities/Moonlight Dream.py rename to module/activities/activitiy_data/Moonlight Dream.py diff --git a/module/activities/NewYearsAperitifOneAndDoneMatch.py b/module/activities/activitiy_data/NewYearsAperitifOneAndDoneMatch.py similarity index 100% rename from module/activities/NewYearsAperitifOneAndDoneMatch.py rename to 
module/activities/activitiy_data/NewYearsAperitifOneAndDoneMatch.py diff --git a/module/activities/OnYourMarkAtMillenniumKivotosHaloFestival.py b/module/activities/activitiy_data/OnYourMarkAtMillenniumKivotosHaloFestival.py similarity index 100% rename from module/activities/OnYourMarkAtMillenniumKivotosHaloFestival.py rename to module/activities/activitiy_data/OnYourMarkAtMillenniumKivotosHaloFestival.py diff --git a/module/activities/PandemicHazardAMiraclePancake.py b/module/activities/activitiy_data/PandemicHazardAMiraclePancake.py similarity index 100% rename from module/activities/PandemicHazardAMiraclePancake.py rename to module/activities/activitiy_data/PandemicHazardAMiraclePancake.py diff --git a/module/activities/PlayHideAndSeekAtImaginationLand.py b/module/activities/activitiy_data/PlayHideAndSeekAtImaginationLand.py similarity index 100% rename from module/activities/PlayHideAndSeekAtImaginationLand.py rename to module/activities/activitiy_data/PlayHideAndSeekAtImaginationLand.py diff --git a/module/activities/PresidentHinasSummerVacation.py b/module/activities/activitiy_data/PresidentHinasSummerVacation.py similarity index 100% rename from module/activities/PresidentHinasSummerVacation.py rename to module/activities/activitiy_data/PresidentHinasSummerVacation.py diff --git a/module/activities/RowdyAndCherry.py b/module/activities/activitiy_data/RowdyAndCherry.py similarity index 100% rename from module/activities/RowdyAndCherry.py rename to module/activities/activitiy_data/RowdyAndCherry.py diff --git a/module/activities/RyubuDoushuWhatsEnvisionedisOneSoleFuture.py b/module/activities/activitiy_data/RyubuDoushuWhatsEnvisionedisOneSoleFuture.py similarity index 100% rename from module/activities/RyubuDoushuWhatsEnvisionedisOneSoleFuture.py rename to module/activities/activitiy_data/RyubuDoushuWhatsEnvisionedisOneSoleFuture.py diff --git a/module/activities/SayBing.py b/module/activities/activitiy_data/SayBing.py similarity index 100% rename from 
module/activities/SayBing.py rename to module/activities/activitiy_data/SayBing.py diff --git a/module/activities/SecretMidnightParty.py b/module/activities/activitiy_data/SecretMidnightParty.py similarity index 100% rename from module/activities/SecretMidnightParty.py rename to module/activities/activitiy_data/SecretMidnightParty.py diff --git a/module/activities/SerenadePromenade.py b/module/activities/activitiy_data/SerenadePromenade.py similarity index 100% rename from module/activities/SerenadePromenade.py rename to module/activities/activitiy_data/SerenadePromenade.py diff --git a/module/activities/ShesideOutside.py b/module/activities/activitiy_data/ShesideOutside.py similarity index 100% rename from module/activities/ShesideOutside.py rename to module/activities/activitiy_data/ShesideOutside.py diff --git a/module/activities/SummerSkysWishes.py b/module/activities/activitiy_data/SummerSkysWishes.py similarity index 100% rename from module/activities/SummerSkysWishes.py rename to module/activities/activitiy_data/SummerSkysWishes.py diff --git a/module/activities/SummerSpecialOperationsRABBITPlatoonAndTheMysteryOfTheMissingShrimp.py b/module/activities/activitiy_data/SummerSpecialOperationsRABBITPlatoonAndTheMysteryOfTheMissingShrimp.py similarity index 100% rename from module/activities/SummerSpecialOperationsRABBITPlatoonAndTheMysteryOfTheMissingShrimp.py rename to module/activities/activitiy_data/SummerSpecialOperationsRABBITPlatoonAndTheMysteryOfTheMissingShrimp.py diff --git a/module/activities/SunlightGirlsNightSong.py b/module/activities/activitiy_data/SunlightGirlsNightSong.py similarity index 100% rename from module/activities/SunlightGirlsNightSong.py rename to module/activities/activitiy_data/SunlightGirlsNightSong.py diff --git a/module/activities/SweetSecretsAndGunfightsATaleOfAfterSchoolSweets.py b/module/activities/activitiy_data/SweetSecretsAndGunfightsATaleOfAfterSchoolSweets.py similarity index 100% rename from 
module/activities/SweetSecretsAndGunfightsATaleOfAfterSchoolSweets.py rename to module/activities/activitiy_data/SweetSecretsAndGunfightsATaleOfAfterSchoolSweets.py diff --git a/module/activities/TheCathedralsMerryChristmas.py b/module/activities/activitiy_data/TheCathedralsMerryChristmas.py similarity index 100% rename from module/activities/TheCathedralsMerryChristmas.py rename to module/activities/activitiy_data/TheCathedralsMerryChristmas.py diff --git a/module/activities/TheFiveSensesAreClouded.py b/module/activities/activitiy_data/TheFiveSensesAreClouded.py similarity index 100% rename from module/activities/TheFiveSensesAreClouded.py rename to module/activities/activitiy_data/TheFiveSensesAreClouded.py diff --git a/module/activities/TripTrapTrain.py b/module/activities/activitiy_data/TripTrapTrain.py similarity index 100% rename from module/activities/TripTrapTrain.py rename to module/activities/activitiy_data/TripTrapTrain.py diff --git a/module/activities/WaffleCrisisTheMiracleMoment.py b/module/activities/activitiy_data/WaffleCrisisTheMiracleMoment.py similarity index 100% rename from module/activities/WaffleCrisisTheMiracleMoment.py rename to module/activities/activitiy_data/WaffleCrisisTheMiracleMoment.py diff --git a/module/activities/WarningOfWhiteChalkWhereAestheticsResideInTheMansionOfDeception.py b/module/activities/activitiy_data/WarningOfWhiteChalkWhereAestheticsResideInTheMansionOfDeception.py similarity index 100% rename from module/activities/WarningOfWhiteChalkWhereAestheticsResideInTheMansionOfDeception.py rename to module/activities/activitiy_data/WarningOfWhiteChalkWhereAestheticsResideInTheMansionOfDeception.py diff --git a/module/activities/activitiy_data/__init__.py b/module/activities/activitiy_data/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/module/activities/anUnconcealedHeart.py b/module/activities/activitiy_data/anUnconcealedHeart.py similarity index 100% rename from 
module/activities/anUnconcealedHeart.py rename to module/activities/activitiy_data/anUnconcealedHeart.py diff --git a/module/activities/bunnyChaserOnTheShip.py b/module/activities/activitiy_data/bunnyChaserOnTheShip.py similarity index 100% rename from module/activities/bunnyChaserOnTheShip.py rename to module/activities/activitiy_data/bunnyChaserOnTheShip.py diff --git a/module/activities/bunnyChaserOnTheShip2.py b/module/activities/activitiy_data/bunnyChaserOnTheShip2.py similarity index 100% rename from module/activities/bunnyChaserOnTheShip2.py rename to module/activities/activitiy_data/bunnyChaserOnTheShip2.py diff --git a/module/activities/iveAlive.py b/module/activities/activitiy_data/iveAlive.py similarity index 100% rename from module/activities/iveAlive.py rename to module/activities/activitiy_data/iveAlive.py diff --git a/module/activities/livelyAndJoyfulWalkingTour.py b/module/activities/activitiy_data/livelyAndJoyfulWalkingTour.py similarity index 100% rename from module/activities/livelyAndJoyfulWalkingTour.py rename to module/activities/activitiy_data/livelyAndJoyfulWalkingTour.py diff --git a/module/activities/no_227_kinosaki_spa.py b/module/activities/activitiy_data/no_227_kinosaki_spa.py similarity index 100% rename from module/activities/no_227_kinosaki_spa.py rename to module/activities/activitiy_data/no_227_kinosaki_spa.py diff --git a/module/activities/no_68_spring_wild_dream.py b/module/activities/activitiy_data/no_68_spring_wild_dream.py similarity index 100% rename from module/activities/no_68_spring_wild_dream.py rename to module/activities/activitiy_data/no_68_spring_wild_dream.py diff --git a/module/activities/pleasant_Valentines_Day_in_schale.py b/module/activities/activitiy_data/pleasant_Valentines_Day_in_schale.py similarity index 100% rename from module/activities/pleasant_Valentines_Day_in_schale.py rename to module/activities/activitiy_data/pleasant_Valentines_Day_in_schale.py diff --git 
a/module/activities/reckless_nun_and_the_witch_in_the_old_library.py b/module/activities/activitiy_data/reckless_nun_and_the_witch_in_the_old_library.py similarity index 100% rename from module/activities/reckless_nun_and_the_witch_in_the_old_library.py rename to module/activities/activitiy_data/reckless_nun_and_the_witch_in_the_old_library.py diff --git a/module/activities/revolutionKupalaNight.py b/module/activities/activitiy_data/revolutionKupalaNight.py similarity index 100% rename from module/activities/revolutionKupalaNight.py rename to module/activities/activitiy_data/revolutionKupalaNight.py diff --git a/module/activities/sakura_flowing_chaos_in_the_gala.py b/module/activities/activitiy_data/sakura_flowing_chaos_in_the_gala.py similarity index 100% rename from module/activities/sakura_flowing_chaos_in_the_gala.py rename to module/activities/activitiy_data/sakura_flowing_chaos_in_the_gala.py diff --git a/module/activities/activity.json b/module/activities/activity.json new file mode 100644 index 000000000..49cf2e309 --- /dev/null +++ b/module/activities/activity.json @@ -0,0 +1,69 @@ +{ + "last_update_time": 1758659959, + "JP": { + "Events": [], + "Raids": { + "total_assault": null, + "grand_assault": null, + "limit_break_assault": null, + "joint_firing_drill": null + }, + "Rewards": { + "commissions_rewards": 1, + "scrimmage_rewards": 1, + "bounty_hunts_rewards": 1, + "schedule_rewards": 1, + "normal_mission_rewards": 1, + "hard_mission_rewards": 1, + "level_exp_rewards": 1 + } + }, + "Global": { + "Events": [ + { + "name": "Highlander Railroad Runaway Incident: And Then The Train Disappeared", + "Start date": "1758592800", + "End date": "1759802400" + } + ], + "Raids": { + "total_assault": null, + "grand_assault": null, + "limit_break_assault": null, + "joint_firing_drill": null + }, + "Rewards": { + "commissions_rewards": 1, + "scrimmage_rewards": 1, + "bounty_hunts_rewards": 2, + "schedule_rewards": 1, + "normal_mission_rewards": 1, + 
"hard_mission_rewards": 1, + "level_exp_rewards": 1 + } + }, + "CN": { + "Events": [ + { + "name": "New Year's Aperitif ~One-and-Done Match~", + "begin_time": "1757570400", + "end_time": "1758780000" + } + ], + "Raids": { + "total_assault": null, + "grand_assault": null, + "limit_break_assault": null, + "joint_firing_drill": null + }, + "Rewards": { + "commissions_rewards": 1, + "scrimmage_rewards": 1, + "bounty_hunts_rewards": 1, + "schedule_rewards": 1, + "normal_mission_rewards": 1, + "hard_mission_rewards": 2, + "level_exp_rewards": 1 + } + } +} \ No newline at end of file diff --git a/module/activities/activity_updater.py b/module/activities/activity_updater.py new file mode 100644 index 000000000..a89e910f1 --- /dev/null +++ b/module/activities/activity_updater.py @@ -0,0 +1,508 @@ +import copy +import difflib +import json +import logging +import os +import re +import sys +import time +from collections import OrderedDict +from datetime import datetime +from zoneinfo import ZoneInfo + +import requests +from lxml import etree + +HEADERS = { + "User-Agent": "Mozilla/5.0 (Linux; Android 13; Pixel 7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 " + "Mobile Safari/537.36", + "Content-Type": "text/html", + 'Accept': 'application/json, text/javascript, */*; q=0.01', + 'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8', + 'game-alias': 'ba' +} + +BASIC_RESULT_FORMAT = { + "Events": [], + "Raids": { + "total_assault": None, + "grand_assault": None, + "limit_break_assault": None, + "joint_firing_drill": None, + }, + "Rewards": { + "commissions_rewards": 1, + "scrimmage_rewards": 1, + "bounty_hunts_rewards": 1, + "schedule_rewards": 1, + "normal_mission_rewards": 1, + "hard_mission_rewards": 1, + "level_exp_rewards": 1 + } +} + +FULL_RESULT_FORMAT = { + "JP": copy.deepcopy(BASIC_RESULT_FORMAT), + "Global": copy.deepcopy(BASIC_RESULT_FORMAT), + "CN": copy.deepcopy(BASIC_RESULT_FORMAT) +} + +DEFAULT_HEADER_MAP = { + "Name (EN)": "name", + "Name (JP)": None, + "Raid 
name": "name", + "Start Date": "begin_time", + "End Date": "end_time", + "Period": None, + "Notes": None, + "Season": None, + "Stages": None, + "Challenge": None, + "Special Rules": None, + "": None, +} + + +def _fetch_activity_table(html, xpath: str, header_map: dict = None, force_list: bool = False): + activities = [] + + # Locate the table using the provided XPath + try: + event_schedules = html.xpath(xpath)[0] + except IndexError: + raise KeyError("No event table found with the provided XPath.") + + event_trs = event_schedules.xpath('.//tr') + if len(event_trs) < 1: + raise KeyError("No event data found.") + + # Extract table headers + headers = [''.join(th.itertext()).strip() for th in event_trs[0]] + + start_date_index = headers.index('Start date') if 'Start date' in headers else -1 + end_date_index = headers.index('End date') if 'End date' in headers else -1 + period_index = headers.index('Period') if 'Period' in headers else -1 + for event_tr in event_trs[1:]: + values = [''.join(item.itertext()).strip() for item in event_tr] + + # If there's a start and end date column, convert them to epoch time + if start_date_index != -1 and end_date_index != -1: + start_time, end_time = _to_epoch_time(values[start_date_index]), _to_epoch_time(values[end_date_index]) + + # Skip events that are not active + current_time = int(time.time()) + if current_time < start_time or current_time > end_time: + continue + + values[start_date_index], values[end_date_index] = str(start_time), str(end_time) + if period_index != -1: + dates = values[period_index].split(' ~ ') + if len(dates) != 2: + raise KeyError("Period column does not contain valid start and end dates.") + start_time, end_time = _to_epoch_time(dates[0]), _to_epoch_time(dates[1]) + + # Skip events that are not active + current_time = int(time.time()) + if current_time < start_time or current_time > end_time: + continue + + # Manually add begin_time and end_time columns + headers += ["begin_time", "end_time"] + values 
+= [str(start_time), str(end_time)] + + # update the table with key_map settings + if header_map is None: + header_map = {} + header_map.update(DEFAULT_HEADER_MAP) + pop_list = [] + for i in range(len(headers)): + if headers[i] in header_map: + if header_map[headers[i]] is not None: + headers[i] = header_map[headers[i]] + else: + pop_list.insert(0, i) + for idx in pop_list: + headers.pop(idx) + values.pop(idx) + activities.append(dict(zip(headers, values))) + # Sort activities by end date + activities.sort(key=lambda x: x[headers[2]], reverse=True) + if not force_list: + if len(activities) == 0: + return None + elif len(activities) == 1: + return activities[0] + return activities + + +def _to_epoch_time(s: str) -> int: + if re.fullmatch(r'\d{4}-\d{2}-\d{2}', s): + dt = datetime.strptime(s + ' 10:00', '%Y-%m-%d %H:%M') + dt = dt.replace(tzinfo=ZoneInfo("Asia/Shanghai")) + return int(dt.timestamp()) + + if re.fullmatch(r'\d{1,2}/\d{1,2}/\d{4} \d{2}:\d{2}', s): + dt = datetime.strptime(s, '%m/%d/%Y %H:%M') + dt = dt.replace(tzinfo=ZoneInfo("Asia/Tokyo")) + return int(dt.timestamp()) + + if re.fullmatch(r'(\d{4}-\d{2}-\d{2} \d{2}:\d{2})', s): + dt = datetime.strptime(s, '%Y-%m-%d %H:%M') + dt = dt.replace(tzinfo=ZoneInfo("Asia/Shanghai")) + return int(dt.timestamp()) + + raise ValueError(f"Unrecognized time format: {s}") + + +def _download_json(url): + logger = logging.getLogger("activity_updater") + for i in range(4): + response = requests.get( + url=url, + headers=HEADERS, + ) + if response.status_code != 200: + logger.error(f"Failed to fetch data from {url}, error code: {response.status_code},retry {i + 1}/3") + continue + return response.json() + raise ValueError(f"Failed to fetch data from {url}") + + +def _unify_table(table_high_prio: dict, table_low_prio: dict, template_table: dict): + result = {} + for key in template_table.keys(): + if key in table_high_prio: + if key in table_low_prio: + if table_high_prio[key] is None and table_low_prio[key] is not None: + 
result[key] = table_low_prio[key] + elif type(template_table[key]) == dict: + result[key] = _unify_table(table_high_prio[key], table_low_prio[key], template_table[key]) + else: + result[key] = table_high_prio[key] + else: + result[key] = table_high_prio[key] + else: + if key in table_low_prio: + result[key] = table_low_prio[key] + return result + + +def update_activity_gamekee_api(): + response = requests.get( + url="https://www.gamekee.com/v1/activity/query", + headers=HEADERS, + params={ + "active_at": int(time.time()) + } + ) + if response.status_code != 200 or response.json()["code"] != 0: + print(f"Failed to fetch activity data, error code: {response.status_code} | {response.json()['code']}") + return None + + pub_area_translator = {"日服": "JP", "国际服": "Global", "国服": "CN"} + result = copy.deepcopy(FULL_RESULT_FORMAT) + + activity_data = response.json()["data"] + for activity in activity_data: + current_time = int(time.time()) + if current_time < activity["begin_at"] or current_time > activity["end_at"]: + # Skip events that are not active + continue + pub_area = pub_area_translator[activity["pub_area"]] + title = activity["title"] + + activity_info = { + "name": title, + "begin_time": str(activity["begin_at"]), + "end_time": str(activity["end_at"]) + } + + if title.startswith("[活动]"): + result[pub_area]["Events"].append(activity_info) + elif title.startswith("总力战"): + result[pub_area]["Raids"]["total_assault"] = activity_info + elif title.startswith("大决战"): + result[pub_area]["Raids"]["grand_assault"] = activity_info + elif title.startswith("制约解除决战"): + result[pub_area]["Raids"]["limit_break_assault"] = activity_info + elif "演习" in title or "战术" in title or "考试" in title: + result[pub_area]["Raids"]["joint_firing_drill"] = activity_info + else: + if "特别依赖" in title or "特殊任务" in title or "特别委托" in title: + result[pub_area]["Rewards"]["commissions_rewards"] = int(title[-2]) + if "学园交流会" in title: + result[pub_area]["Rewards"]["scrimmage_rewards"] = 
int(title[-2]) + if "指名手配" in title or "悬赏通缉" in title: + result[pub_area]["Rewards"]["bounty_hunts_rewards"] = int(title[-2]) + if "日程" in title or "课程表" in title: + result[pub_area]["Rewards"]["schedule_rewards"] = int(title[-2]) + if "任务(Normal)" in title or "任务(普通难度)" in title: + result[pub_area]["Rewards"]["normal_mission_rewards"] = int(title[-2]) + if "任务(Hard)" in title or "任务(困难难度)" in title: + result[pub_area]["Rewards"]["hard_mission_rewards"] = int(title[-2]) + if "老师等级经验值" in title or "帐号经验值" in title: + result[pub_area]["Rewards"]["level_exp_rewards"] = int(title[-2]) + return result + + +def update_activity_bawiki(): + events_url = "https://bluearchive.wiki/wiki/Events" + total_assault_url = "https://bluearchive.wiki/wiki/Total_Assault" + grand_assault_url = "https://bluearchive.wiki/wiki/Grand_Assault" + limit_break_assault_url = "https://bluearchive.wiki/wiki/Limit_Break_Assault" + joint_firing_drill_url = "https://bluearchive.wiki/wiki/Joint_Firing_Drill" + + # ------------------------------------------------------------------------ + # Fetch event schedules and reward campaigns from Events page + # ------------------------------------------------------------------------ + events_response = requests.get( + url=events_url, + headers=HEADERS + ) + events_html = etree.HTML(events_response.content.decode('utf-8'), parser=etree.HTMLParser(encoding='utf-8')) + JP_event_schedules = _fetch_activity_table(events_html, '//*[@id="tabber-Japanese_version"]/table', + force_list=True) + Global_event_schedules = _fetch_activity_table(events_html, '//*[@id="tabber-Global_version"]/table', + force_list=True) + # JP_mini_events = _fetch_activity_table(events_html, '//*[@id="tabber-Japanese_version_2"]/table') + # Global_mini_events = _fetch_activity_table(events_html, '//*[@id="tabber-Global_version_2"]/table') + reward_campaigns = _fetch_activity_table(events_html, '//h1[@id="Reward_campaigns"]/following::table[1]', + force_list=True) + + # 
------------------------------------------------------------------------ + # Fetch total assaults from its page + # ------------------------------------------------------------------------ + total_assault_response = requests.get( + url=total_assault_url, + headers=HEADERS + ) + total_assault_html = etree.HTML(total_assault_response.content.decode('utf-8'), + parser=etree.HTMLParser(encoding='utf-8')) + JP_total_assault = _fetch_activity_table(total_assault_html, '//*[@id="tabber-JP"]/table') + Global_total_assault = _fetch_activity_table(total_assault_html, '//*[@id="tabber-Global"]/table') + + # ------------------------------------------------------------------------ + # Fetch grand assaults from its page + # ------------------------------------------------------------------------ + grand_assault_response = requests.get( + url=grand_assault_url, + headers=HEADERS + ) + grand_assault_html = etree.HTML(grand_assault_response.content.decode('utf-8'), + parser=etree.HTMLParser(encoding='utf-8')) + JP_grand_assault = _fetch_activity_table(grand_assault_html, '//*[@id="tabber-JP"]/table') + Global_grand_assault = _fetch_activity_table(grand_assault_html, '//*[@id="tabber-Global"]/table') + + # ------------------------------------------------------------------------ + # Fetch limit break assaults from its page + # ------------------------------------------------------------------------ + limit_break_assault_response = requests.get( + url=limit_break_assault_url, + headers=HEADERS + ) + limit_break_assault_html = etree.HTML(limit_break_assault_response.content.decode('utf-8'), + parser=etree.HTMLParser(encoding='utf-8')) + JP_limit_break_assault = _fetch_activity_table(limit_break_assault_html, '//*[@id="tabber-JP"]/table') + Global_limit_break_assault = _fetch_activity_table(limit_break_assault_html, '//*[@id="tabber-Global"]/table') + + # ------------------------------------------------------------------------ + # Fetch joint firing drills from its page + # 
------------------------------------------------------------------------ + joint_firing_drill_response = requests.get( + url=joint_firing_drill_url, + headers=HEADERS + ) + joint_firing_drill_html = etree.HTML(joint_firing_drill_response.content.decode('utf-8'), + parser=etree.HTMLParser(encoding='utf-8')) + JP_joint_firing_drill = _fetch_activity_table(joint_firing_drill_html, '//*[@id="tabber-Japanese_version"]/table') + + Global_joint_firing_drill = _fetch_activity_table(joint_firing_drill_html, '//*[@id="tabber-Global_version"]/table') + + result = copy.deepcopy(FULL_RESULT_FORMAT) + result.pop("CN") # BAWiki does not offer CN server's activity data + result["Global"].pop("Rewards") # BAWiki does not offer Global server's reward campaigns data + for item in reward_campaigns: + name = item["Name"] + if "Commissions rewards" in name: + result["JP"]["Rewards"]["commissions_rewards"] = 2 if "Double" in name else 3 + elif "Scrimmage rewards" in name: + result["JP"]["Rewards"]["scrimmage_rewards"] = 2 if "Double" in name else 3 + elif "Bounty Hunts rewards" in name: + result["JP"]["Rewards"]["bounty_hunts_rewards"] = 2 if "Double" in name else 3 + elif "Schedule rewards" in name: + result["JP"]["Rewards"]["schedule_rewards"] = 2 if "Double" in name else 3 + elif "Normal Missions rewards" in name: + result["JP"]["Rewards"]["normal_mission_rewards"] = 2 if "Double" in name else 3 + elif "Hard Missions rewards" in name: + result["JP"]["Rewards"]["hard_mission_rewards"] = 2 if "Double" in name else 3 + elif "level EXP" in name: + result["JP"]["Rewards"]["level_exp_rewards"] = 2 if "Double" in name else 3 + result["JP"]["Events"], result["JP"]["Raids"]["total_assault"], result["JP"]["Raids"]["grand_assault"], \ + result["JP"]["Raids"]["limit_break_assault"], result["JP"]["Raids"]["joint_firing_drill"] = \ + JP_event_schedules, JP_total_assault, JP_grand_assault, JP_limit_break_assault, JP_joint_firing_drill + result["Global"]["Events"], 
result["Global"]["Raids"]["total_assault"], result["Global"]["Raids"]["grand_assault"], \ + result["Global"]["Raids"]["limit_break_assault"], result["Global"]["Raids"]["joint_firing_drill"] = \ + Global_event_schedules, Global_total_assault, Global_grand_assault, Global_limit_break_assault, \ + Global_joint_firing_drill + return result + + +def update_activity_schaledb(localization="en"): + events_json = _download_json(f"https://schaledb.com/data/{localization}/events.min.json") + raids_json = _download_json(f"https://schaledb.com/data/{localization}/raids.min.json") + localization_json = _download_json(f"https://schaledb.com/data/{localization}/localization.min.json") + config_json = _download_json("https://schaledb.com/data/config.min.json") + + localization_dict = { + "Events": {}, + "Raid": {}, + "EliminateRaid": {}, + "MultiFloorRaid": {}, + "TimeAttack": {} + } + raid_type_translator = { + "Raid": "total_assault", + "EliminateRaid": "grand_assault", + "MultiFloorRaid": "limit_break_assault", + "TimeAttack": "joint_firing_drill" + } + pub_area_translator = {"Jp": "JP", "Global": "Global", "Cn": "CN"} + + ########################################## + # prepare localization dict + ########################################## + + # get event name from localization + localization_dict["Events"] = localization_json["EventName"] + + # total assault, grand assault, limit break assault has the same structure + # total assault and grand assault share the same data source + for type in ["Raid", "MultiFloorRaid"]: + for event in raids_json[type]: + raid_id = event["Id"] + raid_name = event["Name"] + localization_dict[type][raid_id] = raid_name + localization_dict["EliminateRaid"] = localization_dict["Raid"] + + for joint_firing_drill in raids_json["TimeAttack"]: + id = joint_firing_drill["Id"] + joint_firing_drill_type = joint_firing_drill["DungeonType"] + name = localization_json["TimeAttackStage"][joint_firing_drill_type] + localization_dict["TimeAttack"][id] = name + + 
########################################## + # generate result table + ########################################## + result = copy.deepcopy(FULL_RESULT_FORMAT) + # SchaleDB does not offer reward campaigns data + result["JP"].pop("Rewards") + result["Global"].pop("Rewards") + result["CN"].pop("Rewards") + + for server_config in config_json["Regions"]: + server = pub_area_translator[server_config["Name"]] + for event in server_config["CurrentEvents"]: + id = str(event["event"]) + + # often SchaleDB mark 10 prefix as a rerun event, we only keep the last 3 digits for localization + if len(id) > 3: + id = id[-3:] + begin_time = event["start"] + end_time = event["end"] + if begin_time <= time.time() <= end_time: + result[server]["Events"].append({ + "name": localization_dict["Events"][id], + "begin_time": str(begin_time), + "end_time": str(end_time) + }) + for event in server_config["CurrentRaid"]: + raid_type = event["type"] + id = event["raid"] + start_time = event["start"] + end_time = event["end"] + current_time = time.time() + if current_time < start_time or current_time > end_time: + continue + + result[server]["Raids"][raid_type_translator[raid_type]] = { + "name": localization_dict[raid_type][id], + "begin_time": str(start_time), + "end_time": str(end_time) + } + + return result + + +def update_activity(): + logger = logging.getLogger("activity_updater") + try: + logger.info("Retrieving activity data from Gamekee API...") + gamekee_response = update_activity_gamekee_api() + logger.info("Gamekee API Response:\n%s", json.dumps(gamekee_response, ensure_ascii=False, indent=4)) + + logger.info("Retrieving activity data from Blue Archive Wiki...") + bawiki_response = update_activity_bawiki() + logger.info("Blue Archive Wiki Response:\n%s", json.dumps(bawiki_response, ensure_ascii=False, indent=4)) + + logger.info("Retrieving activity data from SchaleDB...") + schaledb_response = update_activity_schaledb() + logger.info("SchaleDB Response:\n%s", 
json.dumps(schaledb_response, ensure_ascii=False, indent=4)) + + # For final result, we prefer BAWiki > SchaleDB > Gamekee API + logger.info("Merging activity data...") + final_result = _unify_table(bawiki_response, schaledb_response, FULL_RESULT_FORMAT) + final_result = _unify_table(final_result, gamekee_response, FULL_RESULT_FORMAT) + + logger.info("Final Result:\n%s", json.dumps(final_result, ensure_ascii=False, indent=4)) + return final_result + except Exception as e: + logger.error(f"An error occurred during the activity update process: {e}") + sys.exit(1) + + +if __name__ == "__main__": + logger = logging.getLogger("activity_updater") + logger.setLevel(logging.DEBUG) + log_formatter = logging.Formatter('<%(asctime)s> [%(levelname)s] <%(message)s>', datefmt='%Y-%m-%d %H:%M:%S') + console_handler = logging.StreamHandler(sys.stdout) + console_handler.setFormatter(log_formatter) + logger.addHandler(console_handler) + + activity_json_path = os.path.join(os.path.dirname(__file__), "activity.json") + tmp_json_path = os.path.join(os.path.dirname(__file__), "tmp_activity.json") + pr_body_md_path = os.path.join(os.path.dirname(__file__), "activity_update_log.md") + + logger.info("Starting activity information retrieval and merging process...") + final_result = update_activity() + logger.info("Activity information retrieval and merging process completed.") + # determine if any data is updated + if os.path.exists(activity_json_path): + with open(activity_json_path, "r", encoding="utf-8") as f: + original_data = json.load(f) + original_data.pop("last_update_time") # remove last_update_time for comparison + if original_data == final_result: + logger.info("No updates in activity data, exiting...") + sys.exit(0) + logger.info("Changes detected in activity data, updating activity.json...") + final_result["last_update_time"] = int(time.time()) + ordered_keys = ["last_update_time", "JP", "Global", "CN"] + ordered_result = OrderedDict((k, final_result[k]) for k in ordered_keys 
if k in final_result) + with open(tmp_json_path, "w", encoding="utf-8") as f: + json.dump(ordered_result, f, ensure_ascii=False, indent=4) + logger.info("Updated data written to temporary file tmp_activity.json.") + + # generating pr body markdown + with open(pr_body_md_path, "w", encoding="utf-8") as f: + f.write("### Detected a change in activity data, details as follows:\n\n") + with open(activity_json_path, 'r', encoding='utf-8') as f1, open(tmp_json_path, 'r', encoding='utf-8') as f2: + old_data, new_data = f1.readlines(), f2.readlines() + + diff = difflib.unified_diff(old_data, new_data) + f.write("```diff\n") + f.writelines(diff) + f.write("\n```\n") + f.write("> All data are up-to-date. Requesting a review.\n") + repo_owner = os.environ.get('GITHUB_REPOSITORY_OWNER', 'repository-owner') + f.write(f"\n> **Repository Owner**: @{repo_owner}\n") + f.write(f"\n> Last update time: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n") + logger.info("Pull request body markdown generated at activity_update_log.md.") diff --git a/module/activities/requirements-activity-updater.txt b/module/activities/requirements-activity-updater.txt new file mode 100644 index 000000000..9166906f3 --- /dev/null +++ b/module/activities/requirements-activity-updater.txt @@ -0,0 +1,3 @@ +requests +lxml +tzdata diff --git a/module/dailyGameActivities/HinaSummerVacationAudioGame.py b/module/dailyGameActivities/HinaSummerVacationAudioGame.py index 424ff23f2..147b3d18e 100644 --- a/module/dailyGameActivities/HinaSummerVacationAudioGame.py +++ b/module/dailyGameActivities/HinaSummerVacationAudioGame.py @@ -3,7 +3,7 @@ from core import picture from core.color import rgb_in_range from core.device.screenshot.nemu import NemuScreenshot -from module.activities.PresidentHinasSummerVacation import to_activity +from module.activities.activitiy_data.PresidentHinasSummerVacation import to_activity midy = 302 diff --git a/pyproject.toml b/pyproject.toml index d2e7de806..0efa280d5 100644 --- 
a/pyproject.toml +++ b/pyproject.toml @@ -1,28 +1,31 @@ [project] name = "blue_archive_auto_script" -version = "1.1.3" +version = "1.4.3" description = "用于实现蔚蓝档案自动化" authors = [{ name = "pur1fying", email = "2274916027@qq.com" }] dependencies = [ "lmdb", - "PyQt5", - "numpy", - "cnocr", + "pyqt5==5.15.11", + "pyqt5-qt5==5.15.2", + "numpy < 2.0", + "av == 12.0.0", "imgaug", - "onnxruntime", - "adbutils<=2.2.1", - "uiautomator2", - "opencv-python-headless", - "PyQt-Fluent-Widgets", - "paddlepaddle", + "win11toast", + "adbutils == 2.9.3", + "uiautomator2 == 3.4.0", + "opencv-python == 4.8.1.78", + "PyQt-Fluent-Widgets == 1.2.0", + "pyinstaller", + "requests", + "dulwich", + "psutil", + "tqdm", + "tomli == 2.2.1", + "tomli_w == 1.2.0", + "PyAutoGUI == 0.9.54", + "mss==10.0.0", + "tzdata" ] requires-python = ">=3.9" readme = "README.md" license = { text = "GPL-3.0-only" } - -[build-system] -requires = ["pdm-backend"] -build-backend = "pdm.backend" - -[tool.pdm] -distribution = true diff --git a/requirements-linux.txt b/requirements-linux.txt index f1331e62d..9f6ef500e 100644 --- a/requirements-linux.txt +++ b/requirements-linux.txt @@ -4,8 +4,8 @@ numpy < 2.0 av == 12.0.0 imgaug dulwich -adbutils == 2.2.1 -uiautomator2 == 2.16.23 +adbutils == 2.9.3 +uiautomator2 == 3.4.0 opencv-python-headless PyQt-Fluent-Widgets psutil diff --git a/requirements.txt b/requirements.txt index ef13e5c79..7b7c3bae5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,9 +4,10 @@ numpy < 2.0 av == 12.0.0 imgaug win11toast -adbutils == 2.2.1 -uiautomator2 == 2.16.23 +adbutils == 2.9.3 +uiautomator2 == 3.4.0 opencv-python == 4.8.1.78 +lxml PyQt-Fluent-Widgets == 1.2.0 # gevent diff --git a/src/atx_app/ATX.apk b/src/atx_app/ATX.apk deleted file mode 100644 index e8d5c2549..000000000 Binary files a/src/atx_app/ATX.apk and /dev/null differ diff --git a/src/atx_app/app-uiautomator-test.apk b/src/atx_app/app-uiautomator-test.apk deleted file mode 100644 index 17bd64445..000000000 Binary files 
a/src/atx_app/app-uiautomator-test.apk and /dev/null differ diff --git a/src/atx_app/app-uiautomator.apk b/src/atx_app/app-uiautomator.apk deleted file mode 100644 index e8d5c2549..000000000 Binary files a/src/atx_app/app-uiautomator.apk and /dev/null differ diff --git a/src/atx_app/atx-agent_0.10.0_linux_386/atx-agent b/src/atx_app/atx-agent_0.10.0_linux_386/atx-agent deleted file mode 100644 index fbddb8ca9..000000000 Binary files a/src/atx_app/atx-agent_0.10.0_linux_386/atx-agent and /dev/null differ diff --git a/src/atx_app/atx-agent_0.10.0_linux_amd64/atx-agent b/src/atx_app/atx-agent_0.10.0_linux_amd64/atx-agent deleted file mode 100644 index 4469cc6ff..000000000 Binary files a/src/atx_app/atx-agent_0.10.0_linux_amd64/atx-agent and /dev/null differ diff --git a/src/atx_app/atx-agent_0.10.0_linux_arm64/atx-agent b/src/atx_app/atx-agent_0.10.0_linux_arm64/atx-agent deleted file mode 100644 index e7244eea2..000000000 Binary files a/src/atx_app/atx-agent_0.10.0_linux_arm64/atx-agent and /dev/null differ diff --git a/src/atx_app/atx-agent_0.10.0_linux_armv6/atx-agent b/src/atx_app/atx-agent_0.10.0_linux_armv6/atx-agent deleted file mode 100644 index b8c454b80..000000000 Binary files a/src/atx_app/atx-agent_0.10.0_linux_armv6/atx-agent and /dev/null differ diff --git a/src/atx_app/atx-agent_0.10.0_linux_armv7/atx-agent b/src/atx_app/atx-agent_0.10.0_linux_armv7/atx-agent deleted file mode 100644 index 86405a6d6..000000000 Binary files a/src/atx_app/atx-agent_0.10.0_linux_armv7/atx-agent and /dev/null differ diff --git a/src/images/Global_zh-tw/normal_task/preset_column.png b/src/images/Global_zh-tw/normal_task/preset_column.png new file mode 100644 index 000000000..4d53917c1 Binary files /dev/null and b/src/images/Global_zh-tw/normal_task/preset_column.png differ