chore: update path in scripts
parent 747966e6db
commit 946c5b4112
@@ -11,7 +11,7 @@ from fetch import (
     get_rules,
 )
 
-from filetools import _file_path
+from filetools import workspace_path
 
 
 def test_get_paper_urls():
@@ -40,7 +40,7 @@ def _normalize(iterable):
 
 def _description_parsing_cases():
     with open(
-        _file_path("description_parsing_cases.json"),
+        workspace_path("scripts", "description_parsing_cases.json"),
         "r",
         encoding="utf-8",
     ) as f:
@@ -4,7 +4,7 @@ import os
 
 __dirname__ = os.path.abspath(os.path.dirname(__file__))
 
 
-def _file_path(*other):
-    return os.path.abspath(os.path.join(__dirname__, *other))
+def workspace_path(*other):
+    return os.path.join(os.path.dirname(__dirname__), *other)
 
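For orientation, here is a minimal sketch of what the renamed helper resolves to, assuming filetools.py sits in a scripts/ directory one level below the workspace root (the directory layout is an assumption for illustration, not part of the diff):

```python
import os

# Assumed layout (illustration only): <workspace>/scripts/filetools.py
__dirname__ = os.path.abspath(os.path.dirname(__file__))  # .../<workspace>/scripts


def workspace_path(*other):
    # New helper: paths are rooted one level above the scripts directory,
    # i.e. at the workspace root.
    return os.path.join(os.path.dirname(__dirname__), *other)


def _file_path(*other):
    # Old helper (removed by this commit): paths were rooted at the
    # directory containing this module.
    return os.path.abspath(os.path.join(__dirname__, *other))


# Under the assumed layout both calls below point at the same file,
# which is why the test above adds the explicit "scripts" segment.
print(_file_path("description_parsing_cases.json"))
print(workspace_path("scripts", "description_parsing_cases.json"))
```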
@@ -15,6 +15,7 @@ from tqdm import tqdm
 
 from fetch import CustomJSONEncoder, fetch_holiday
 from generate_ics import generate_ics
+from filetools import workspace_path
 
 
 class ChinaTimezone(tzinfo):
@@ -30,19 +31,13 @@ class ChinaTimezone(tzinfo):
         return timedelta()
 
 
-__dirname__ = os.path.abspath(os.path.dirname(__file__))
-
-
-def _file_path(*other):
-    return os.path.join(__dirname__, *other)
-
 
 def update_data(year: int) -> Iterator[str]:
     """Update and store data for a year."""
 
-    json_filename = _file_path(f"{year}.json")
-    ics_filename = _file_path(f"{year}.ics")
+    json_filename = workspace_path(f"{year}.json")
+    ics_filename = workspace_path(f"{year}.ics")
     with open(json_filename, "w", encoding="utf-8", newline="\n") as f:
         data = fetch_holiday(year)
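Under the same assumed layout, the per-year output files keep landing at the workspace root even though the script itself now lives under scripts/; a hedged usage sketch (the year value is illustrative):

```python
from filetools import workspace_path  # import added by this commit

year = 2024  # illustrative
json_filename = workspace_path(f"{year}.json")  # e.g. <workspace>/2024.json
ics_filename = workspace_path(f"{year}.ics")    # e.g. <workspace>/2024.ics
print(json_filename, ics_filename)
```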
@@ -74,14 +69,14 @@ def update_main_ics(fr_year, to_year):
 def update_main_ics(fr_year, to_year):
     all_days = []
     for year in range(fr_year, to_year + 1):
-        filename = _file_path(f"{year}.json")
+        filename = workspace_path(f"{year}.json")
         if not os.path.isfile(filename):
             continue
         with open(filename, "r", encoding="utf8") as inf:
             data = json.loads(inf.read())
             all_days.extend(data.get("days"))
 
-    filename = _file_path("holiday-cn.ics")
+    filename = workspace_path("holiday-cn.ics")
     generate_ics(
         all_days,
         filename,
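Read back the same way, the aggregation step scans the workspace root for whichever per-year files exist; a small sketch under the same assumptions (the year range is illustrative):

```python
import json
import os

from filetools import workspace_path

all_days = []
for year in range(2020, 2025):  # illustrative range
    filename = workspace_path(f"{year}.json")
    if not os.path.isfile(filename):
        continue  # years that were never fetched are skipped, as in the diff
    with open(filename, "r", encoding="utf8") as inf:
        all_days.extend(json.loads(inf.read()).get("days"))
print(f"{len(all_days)} day entries collected for the combined ICS")
```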
@@ -147,8 +142,8 @@ def main():
     temp_note_fd, temp_note_name = mkstemp()
     with open(temp_note_fd, "w", encoding="utf-8") as f:
         f.write(tag + "\n\n```diff\n" + diff + "\n```\n")
-    os.makedirs(_file_path("dist"), exist_ok=True)
-    zip_path = _file_path("dist", f"holiday-cn-{tag}.zip")
+    os.makedirs(workspace_path("dist"), exist_ok=True)
+    zip_path = workspace_path("dist", f"holiday-cn-{tag}.zip")
     pack_data(zip_path)
 
     subprocess.run(
@@ -171,10 +166,10 @@ def pack_data(file):
     """Pack data json in zip file."""
 
     zip_file = ZipFile(file, "w")
-    for i in os.listdir(__dirname__):
+    for i in os.listdir(workspace_path()):
         if not re.match(r"\d+\.json", i):
             continue
-        zip_file.write(_file_path(i), i)
+        zip_file.write(workspace_path(i), i)
 
 
 if __name__ == "__main__":
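Note that the zero-argument call workspace_path() replaces the old __dirname__: with no extra segments it returns the workspace root itself, which is the directory the packing loop scans. A minimal sketch (the archive name is illustrative):

```python
import os
import re
from zipfile import ZipFile

from filetools import workspace_path  # zero-arg call resolves to the workspace root

with ZipFile("holiday-cn-example.zip", "w") as zip_file:  # illustrative archive name
    for name in os.listdir(workspace_path()):
        if not re.match(r"\d+\.json", name):
            continue  # only the per-year data files are packed
        zip_file.write(workspace_path(name), name)
```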