Add generated TRaSH-Guides with scripts
This commit is contained in:
parent
8e15e62281
commit
0857ea9dc6
885 changed files with 13936 additions and 34503 deletions
|
|
@ -1,157 +0,0 @@
|
|||
import yaml
|
||||
import json
|
||||
from pathlib import Path
|
||||
from datetime import datetime, timezone, date
|
||||
|
||||
|
||||
class DateTimeEncoder(json.JSONEncoder):
    """JSON encoder that serializes date/datetime objects as ISO-8601 strings."""

    def default(self, obj):
        # Dates and datetimes have no native JSON representation; emit
        # ISO-8601 text. Everything else falls through to the base class.
        if not isinstance(obj, (date, datetime)):
            return super().default(obj)
        return obj.isoformat()
|
||||
|
||||
|
||||
def parse_frontmatter(content):
    """Split optional YAML frontmatter off a markdown document.

    Returns a ``(metadata, body)`` tuple. When the document does not start
    with a ``---`` fence, the closing fence is missing, or the YAML between
    the fences is invalid, metadata is an empty dict and the body is the
    original content unchanged.
    """
    if not content.startswith("---"):
        return {}, content
    # Locate the closing fence relative to the text after the opening "---".
    closing = content[3:].find("---")
    if closing == -1:
        return {}, content
    try:
        meta = yaml.safe_load(content[3:closing + 3])
    except yaml.YAMLError:
        # Malformed frontmatter: treat the whole document as body text.
        return {}, content
    body = content[closing + 6:].strip()
    return meta, body
|
||||
|
||||
|
||||
def load_regex_patterns():
    """Load every regex pattern from the ``regex_patterns`` folder.

    Returns a dict mapping each file stem to the ``pattern`` value of the
    corresponding ``*.yml`` file; files that fail to load are reported on
    stdout and skipped.
    """
    patterns = {}
    folder = Path("regex_patterns")
    if not folder.exists():
        return patterns
    for path in folder.glob("*.yml"):
        try:
            with open(path, encoding='utf-8') as handle:
                loaded = yaml.safe_load(handle)
            patterns[path.stem] = loaded.get('pattern')
        except Exception as e:
            print(f"Error loading regex pattern {path}: {e}")
    return patterns
|
||||
|
||||
|
||||
def resolve_patterns_in_conditions(conditions, patterns):
    """Replace pattern references in conditions with the actual regex text.

    For condition types whose ``pattern`` field names an entry in the
    regex_patterns collection, substitute the referenced regex in place.
    The (mutated) conditions list is returned.
    """
    # For these condition types the pattern field is always a reference
    # into the regex_patterns collection, never an inline regex.
    referencing_types = ('release_title', 'release_group', 'edition')
    for cond in conditions:
        if cond.get('type') not in referencing_types:
            continue
        ref = cond.get('pattern')
        if ref and ref in patterns:
            cond['pattern'] = patterns[ref]
    return conditions
|
||||
|
||||
|
||||
def bundle_markdown(folder_name):
    """Bundle every markdown file in *folder_name* into a list of records.

    Each record carries the file stem as ``_id``, the markdown body, the
    file's UTC mtime as ``last_modified``, and any frontmatter metadata
    (``title`` and ``author`` are defaulted when missing). Files that fail
    to process are reported on stdout and skipped.
    """
    bundled = []
    folder = Path(folder_name)
    if not folder.exists():
        return bundled
    # Sorted so output ordering is deterministic across runs.
    for md_file in sorted(folder.glob("*.md")):
        try:
            with open(md_file, encoding='utf-8') as handle:
                raw = handle.read()
            meta, body = parse_frontmatter(raw)
            # Guarantee the fields downstream consumers rely on.
            if 'title' not in meta:
                meta['title'] = md_file.stem.replace('-', ' ').title()
            if 'author' not in meta:
                meta['author'] = 'Unknown'
            modified = datetime.fromtimestamp(md_file.stat().st_mtime,
                                              tz=timezone.utc)
            bundled.append({
                "_id": md_file.stem,
                "content": body,
                "last_modified": modified.isoformat(),
                **meta,
            })
        except Exception as e:
            print(f"Error processing {md_file}: {e}")
    return bundled
|
||||
|
||||
|
||||
def bundle_folder(folder_name, regex_patterns=None):
    """Bundle one content folder into a list of records.

    Markdown folders are delegated to :func:`bundle_markdown`; every other
    folder is read as ``*.yml`` files keyed by file stem. For the
    ``custom_formats`` folder, pattern references inside conditions are
    resolved against *regex_patterns*.
    """
    if folder_name in ("wiki", "dev_logs"):
        return bundle_markdown(folder_name)

    bundled = []
    folder = Path(folder_name)
    if not folder.exists():
        return bundled
    # Sorted so output ordering is deterministic across runs.
    for yml_file in sorted(folder.glob("*.yml")):
        try:
            with open(yml_file, encoding='utf-8') as handle:
                record = yaml.safe_load(handle)
            record["_id"] = yml_file.stem

            # Custom formats may reference shared regex patterns by name;
            # inline the real regex before bundling.
            if (folder_name == "custom_formats" and regex_patterns
                    and "conditions" in record):
                record["conditions"] = resolve_patterns_in_conditions(
                    record["conditions"], regex_patterns)

            bundled.append(record)
        except Exception as e:
            print(f"Error processing {yml_file}: {e}")
    return bundled
|
||||
|
||||
|
||||
def main():
    """Bundle every content folder into bundles/<folder>.json plus a version file."""
    Path("bundles").mkdir(exist_ok=True)

    # Regex patterns must be loaded first so custom-format conditions can
    # have their references resolved.
    print("Loading regex patterns...")
    regex_patterns = load_regex_patterns()

    folders = [
        "custom_formats", "profiles", "regex_patterns", "group_tiers",
        "dev_logs", "wiki"
    ]

    for folder in folders:
        print(f"Processing {folder}...")
        patterns = regex_patterns if folder == "custom_formats" else None
        data = bundle_folder(folder, regex_patterns=patterns)
        bundle_path = f"bundles/{folder}.json"
        with open(bundle_path, "w", encoding='utf-8') as handle:
            json.dump(data, handle, indent=2, cls=DateTimeEncoder)
        print(f"Created {bundle_path} with {len(data)} items")

    # Record when the bundles were generated and what they contain.
    version = {
        "updated_at": datetime.now(timezone.utc).isoformat(),
        "folders": folders,
    }
    with open("bundles/version.json", "w", encoding='utf-8') as handle:
        json.dump(version, handle, indent=2, cls=DateTimeEncoder)

    print("Bundle creation complete!")


if __name__ == "__main__":
    main()
|
||||
61
scripts/generate.py
Normal file
61
scripts/generate.py
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
# /// script
|
||||
# requires-python = ">=3.13"
|
||||
# dependencies = [
|
||||
# "pyyaml",
|
||||
# ]
|
||||
# ///
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import yaml
|
||||
|
||||
from utils.custom_formats import collect_custom_formats
|
||||
from utils.regex_patterns import collect_regex_patterns
|
||||
from utils.profiles import collect_profiles
|
||||
|
||||
def clear_output_dir(output_dir):
    """Delete every regular file directly inside *output_dir*.

    A missing directory is reported and left alone. Subdirectory entries
    are skipped rather than removed.
    """
    if not os.path.exists(output_dir):
        # Include the directory name — the original f-string had no
        # placeholder, so the message never said which directory was missing.
        print(f"Output directory {output_dir} does not exist, skipping clearing")
        return
    for filename in os.listdir(output_dir):
        file_path = os.path.join(output_dir, filename)
        # os.remove() raises IsADirectoryError on a directory entry,
        # so only delete plain files.
        if os.path.isfile(file_path):
            os.remove(file_path)
    print(f"Cleared output directory: {output_dir}")
|
||||
|
||||
def main():
    """Generate repo YAML (regex patterns, custom formats, profiles) from a
    TRaSH-Guides checkout.

    Usage: python generate.py <input_dir> <output_dir>
    """
    if len(sys.argv) != 3:
        print("Usage: python generate.py <input_dir> <output_dir>")
        sys.exit(1)
    input_dir = sys.argv[1]
    output_dir = sys.argv[2]

    # Prepare (and empty) each output subdirectory so stale files from a
    # previous run never linger.
    regex_patterns_dir = os.path.join(output_dir, 'regex_patterns')
    os.makedirs(regex_patterns_dir, exist_ok=True)
    clear_output_dir(regex_patterns_dir)

    custom_formats_dir = os.path.join(output_dir, 'custom_formats')
    os.makedirs(custom_formats_dir, exist_ok=True)
    clear_output_dir(custom_formats_dir)

    profiles_dir = os.path.join(output_dir, 'profiles')
    os.makedirs(profiles_dir, exist_ok=True)
    clear_output_dir(profiles_dir)

    # TODO: Support Sonarr
    for service in ['radarr']:
        trash_custom_formats_dir = os.path.join(input_dir, f"{service}/cf")
        if not os.path.exists(trash_custom_formats_dir):
            print(f"Custom format directory {trash_custom_formats_dir} does not exist, skipping.")
            continue

        trash_profiles_dir = os.path.join(input_dir, f"{service}/quality-profiles")
        if not os.path.exists(trash_profiles_dir):
            # Fixed copy-paste: this message previously said "Custom format
            # directory" although it reports the quality-profiles directory.
            print(f"Profiles directory {trash_profiles_dir} does not exist, skipping.")
            continue

        # Order matters: regex patterns must exist before custom formats
        # reference them, and custom formats before profiles score them.
        collect_regex_patterns(service, trash_custom_formats_dir, regex_patterns_dir)
        collect_custom_formats(service, trash_custom_formats_dir, custom_formats_dir)
        collect_profiles(service, trash_profiles_dir, profiles_dir)


if __name__ == "__main__":
    main()
|
||||
|
|
@ -1,210 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
import yaml
|
||||
|
||||
|
||||
def load_template(template_path):
    """Load a YAML template file, exiting the program if it is missing."""
    try:
        with open(template_path, 'r') as handle:
            return yaml.safe_load(handle)
    except FileNotFoundError:
        # Templates are required inputs; a missing one is fatal.
        print(f"Error: Template file not found: {template_path}")
        sys.exit(1)
|
||||
|
||||
|
||||
def create_regex_pattern(group_name,
                         template_dir,
                         output_dir,
                         dry_run=False,
                         show_preview=False):
    """Create a regex pattern file for a release group if it doesn't exist.

    Fills the shared releaseGroup.yml template with the group's name and a
    boundary-anchored regex, then writes <output_dir>/<group_name>.yml
    (unless *dry_run* is set). *show_preview* prints the rendered YAML
    during a dry run.
    """
    output_path = output_dir / f"{group_name}.yml"

    # Never clobber an already-generated (possibly hand-tuned) pattern.
    if output_path.exists():
        print(f"Skipping existing regex pattern: {output_path}")
        return

    verb = 'Would create' if dry_run else 'Creating'
    print(f"{verb} regex pattern: {output_path}")

    template = load_template(template_dir / "releaseGroup.yml")
    template['name'] = group_name
    # Match the group name preceded by start-of-string or a separator.
    template['pattern'] = f"(?<=^|[\\s.-]){group_name}\\b"

    if dry_run and show_preview:
        print("\nPreview of first regex pattern:")
        print("---")
        print(yaml.dump(template,
                        sort_keys=False,
                        default_flow_style=False,
                        indent=2))
        print("---\n")

    # The output directory may not exist yet on a fresh checkout.
    output_dir.mkdir(parents=True, exist_ok=True)

    if not dry_run:
        with open(output_path, 'w') as handle:
            yaml.dump(template, handle, sort_keys=False)
|
||||
|
||||
|
||||
def create_tier_format(tier,
                       resolution,
                       type_name,
                       groups,
                       template_dir,
                       output_dir,
                       dry_run=False,
                       show_preview=False):
    """Create a custom format file for a specific tier.

    Builds "<resolution> <type_name> Tier <tier>.yml" from the groupTier
    template, adding one release_group condition per group in the tier.
    Does nothing when the tier has no groups. *show_preview* prints the
    rendered YAML during a dry run.
    """
    # Only groups belonging to this tier participate.
    tier_groups = [g["name"] for g in groups if g["tier"] == tier]
    if not tier_groups:
        return

    template = load_template(template_dir / "groupTier.yml")

    # Stamp the tier-specific name and description into the template.
    template['name'] = f"{resolution} {type_name} Tier {tier}"
    template['description'] = (
        f"Matches release groups who fall under {resolution} {type_name} Tier {tier}")

    # Point the template's resolution condition at the requested resolution.
    for condition in template['conditions']:
        if condition.get('resolution'):
            condition['name'] = resolution
            condition['resolution'] = resolution

    # One release-group condition per group in this tier.
    for group_name in tier_groups:
        template['conditions'].append({
            'name': group_name,
            'negate': False,
            'pattern': group_name,
            'required': False,
            'type': 'release_group',
        })

    # Ensure tests is an empty list, not null.
    template['tests'] = []

    # The output directory may not exist yet on a fresh checkout.
    output_dir.mkdir(parents=True, exist_ok=True)

    output_path = output_dir / f"{resolution} {type_name} Tier {tier}.yml"
    existing = "Overwriting" if output_path.exists() else "Creating"
    verb = 'Would ' + existing.lower() if dry_run else existing
    print(f"{verb} custom format: {output_path} (includes {len(tier_groups)} groups)")

    if dry_run and show_preview:
        print("\nPreview of first custom format:")
        print("---")
        print(yaml.dump(template,
                        sort_keys=False,
                        default_flow_style=False,
                        indent=2))
        print("---\n")

    if not dry_run:
        with open(output_path, 'w') as handle:
            yaml.dump(template,
                      handle,
                      sort_keys=False,
                      default_flow_style=False,
                      indent=2)
|
||||
|
||||
|
||||
def main():
    """CLI entry point: build regex patterns and tier custom formats from tier JSON."""
    parser = argparse.ArgumentParser(
        description='Create Radarr custom formats for release group tiers')
    parser.add_argument('json_file',
                        help='Input JSON file containing tier data')
    parser.add_argument('--resolution',
                        choices=['SD', '720p', '1080p', '2160p'],
                        required=True,
                        help='Resolution for custom formats')
    parser.add_argument('--type',
                        choices=['Quality', 'Balanced'],
                        required=True,
                        help='Type of custom format')
    parser.add_argument(
        '--dry-run',
        action='store_true',
        help='Show what would be done without making any changes')
    args = parser.parse_args()

    # All data folders live relative to the repository root (one level up).
    script_dir = Path(__file__).parent
    template_dir = script_dir.parent / "templates"
    regex_dir = script_dir.parent / "regex_patterns"
    format_dir = script_dir.parent / "custom_formats"

    try:
        with open(args.json_file, 'r') as handle:
            data = json.load(handle)
    except FileNotFoundError:
        print(f"Error: Input JSON file not found: {args.json_file}")
        sys.exit(1)
    except json.JSONDecodeError:
        print(f"Error: Invalid JSON file: {args.json_file}")
        sys.exit(1)

    # Summarize the input before doing any work.
    groups = data['tiered_groups']
    tier_count = len(set(group['tier'] for group in groups))
    print(f"\nAnalyzing input file: {args.json_file}")
    print(f"Found {len(groups)} release groups across {tier_count} tiers")

    if args.dry_run:
        print("\nDRY RUN - No files will be created or modified\n")

    # One regex pattern per group; preview only the first in dry-run mode.
    print("\nProcessing regex patterns:")
    for index, group in enumerate(groups):
        create_regex_pattern(group["name"],
                             template_dir,
                             regex_dir,
                             args.dry_run,
                             show_preview=(index == 0))

    # One custom format per distinct tier, in ascending tier order.
    print("\nProcessing custom formats:")
    for index, tier in enumerate(sorted(set(g["tier"] for g in groups))):
        create_tier_format(tier,
                           args.resolution,
                           args.type,
                           groups,
                           template_dir,
                           format_dir,
                           args.dry_run,
                           show_preview=(index == 0))

    outcome = 'simulated' if args.dry_run else 'created'
    print(f"\nSuccessfully {outcome} custom formats for {args.resolution} {args.type}")


if __name__ == "__main__":
    main()
|
||||
0
scripts/utils/__init__.py
Normal file
0
scripts/utils/__init__.py
Normal file
BIN
scripts/utils/__pycache__/__init__.cpython-313.pyc
Normal file
BIN
scripts/utils/__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
BIN
scripts/utils/__pycache__/custom_formats.cpython-313.pyc
Normal file
BIN
scripts/utils/__pycache__/custom_formats.cpython-313.pyc
Normal file
Binary file not shown.
BIN
scripts/utils/__pycache__/profiles.cpython-313.pyc
Normal file
BIN
scripts/utils/__pycache__/profiles.cpython-313.pyc
Normal file
Binary file not shown.
BIN
scripts/utils/__pycache__/regex_patterns.cpython-313.pyc
Normal file
BIN
scripts/utils/__pycache__/regex_patterns.cpython-313.pyc
Normal file
Binary file not shown.
BIN
scripts/utils/__pycache__/strings.cpython-313.pyc
Normal file
BIN
scripts/utils/__pycache__/strings.cpython-313.pyc
Normal file
Binary file not shown.
86
scripts/utils/custom_formats.py
Normal file
86
scripts/utils/custom_formats.py
Normal file
|
|
@ -0,0 +1,86 @@
|
|||
import os
|
||||
import json
|
||||
import yaml
|
||||
|
||||
# Maps a *arr specification "implementation" class name to the tag list
# applied to the generated custom-format YAML (see collect_custom_format).
IMPLEMENTATION_TO_TAG_MAPPING = {
    'ReleaseTitleSpecification': ['Release Title'],
    'ResolutionSpecification': ['Resolution'],
    'SourceSpecification': ['Source'],
    'LanguageSpecification': ['Language'],
    'ReleaseGroupSpecification': ['Release Group'],
    'IndexerFlagSpecification': ['Indexer Flag'],
    'QualityModifierSpecification': ['Quality Modifier'],
    'ReleaseTypeSpecification': ['Release Type'],
}

# Maps a *arr specification "implementation" class name to the condition
# 'type' string used by this repository's YAML schema.
IMPLEMENTATION_TO_TYPE_MAPPING = {
    'ReleaseTitleSpecification': 'release_title',
    'ResolutionSpecification': 'resolution',
    'SourceSpecification': 'source',
    'LanguageSpecification': 'language',
    'ReleaseGroupSpecification': 'release_group',
    'IndexerFlagSpecification': 'indexer_flag',
    'QualityModifierSpecification': 'quality_modifier',
    'ReleaseTypeSpecification': 'release_type',
}
|
||||
|
||||
def collect_custom_format(service, file_name, input_json, output_dir):
    """Convert one TRaSH-Guides custom-format JSON into this repo's YAML schema.

    Translates each specification into a condition dict and writes
    <output_dir>/<file_name>-<trash_id>.yml.
    """
    conditions = []
    tags = []
    for spec in input_json.get('specifications', []):
        implementation = spec.get('implementation')
        condition = {
            'name': spec.get('name', ''),
            'negate': spec.get('negate', False),
            'required': spec.get('required', False),
            'type': IMPLEMENTATION_TO_TYPE_MAPPING.get(implementation, 'unknown'),
        }

        # Each implementation stores its payload under a different key.
        if implementation in ['ReleaseTitleSpecification', 'ReleaseGroupSpecification']:
            # For these types the pattern field is a reference by name,
            # resolved later against the regex_patterns collection.
            condition['pattern'] = spec.get('name', '')
        elif implementation in ['ResolutionSpecification']:
            condition['resolution'] = spec.get('fields', {}).get('value')
        elif implementation in ['SourceSpecification']:
            condition['source'] = spec.get('fields', {}).get('value')
        elif implementation in ['LanguageSpecification']:
            # TODO: exceptLanguage
            condition['language'] = spec.get('fields', {}).get('value')
        elif implementation in ['IndexerFlagSpecification']:
            condition['flag'] = spec.get('fields', {}).get('value')
        elif implementation in ['QualityModifierSpecification']:
            condition['qualityModifier'] = spec.get('fields', {}).get('value')
        elif implementation in ['ReleaseTypeSpecification']:
            condition['releaseType'] = spec.get('fields', {}).get('value')

        conditions.append(condition)

        # BUGFIX: tags previously read the loop variable after the loop, so
        # only the LAST spec's tag was used, an empty specifications list
        # raised NameError, and an unmapped implementation raised KeyError.
        # Aggregate (deduplicated) tags across every specification instead.
        for tag in IMPLEMENTATION_TO_TAG_MAPPING.get(implementation, []):
            if tag not in tags:
                tags.append(tag)

    # Compose YAML structure
    name = input_json.get('name', '')
    trash_id = input_json.get('trash_id', '')
    yml_data = {
        'name': name,
        'trash_id': trash_id,
        'trash_scores': input_json.get('trash_scores', {}),
        'description': f"""Custom format from TRaSH-Guides.
https://trash-guides.info/{service.capitalize()}/{service.capitalize()}-collection-of-custom-formats/#{file_name}""",
        'tags': tags,
        'conditions': conditions,
        'tests': []
    }

    # Output path
    output_path = os.path.join(output_dir, f"{file_name}-{trash_id}.yml")
    with open(output_path, 'w', encoding='utf-8') as f:
        yaml.dump(yml_data, f, sort_keys=False, allow_unicode=True)
    print(f"Generated: {output_path}")
|
||||
|
||||
def collect_custom_formats(service, input_dir, output_dir):
    """Walk *input_dir* recursively and convert every custom-format JSON file."""
    for root, _, files in os.walk(input_dir):
        for filename in files:
            if not filename.endswith('.json'):
                continue
            # The file stem (no extension) becomes part of the output name.
            stem = os.path.splitext(filename)[0]
            with open(os.path.join(root, filename), 'r', encoding='utf-8') as handle:
                data = json.load(handle)
            collect_custom_format(service, stem, data, output_dir)
|
||||
115
scripts/utils/profiles.py
Normal file
115
scripts/utils/profiles.py
Normal file
|
|
@ -0,0 +1,115 @@
|
|||
import os
|
||||
import json
|
||||
import yaml
|
||||
|
||||
def find_score_for_custom_format(trash_score_set, custom_format_name, trash_id, output_dir):
    """Look up the score a profile assigns to a custom format.

    Locates the generated custom-format YAML (named ``*-<trash_id>.yml``)
    in the sibling ``custom_formats`` directory and returns its score for
    *trash_score_set*, falling back to the 'default' score and finally 0
    when the format, its scores, or the score set cannot be found.
    """
    custom_formats_dir = os.path.join(output_dir, '..', 'custom_formats')
    target_file = None
    for fname in os.listdir(custom_formats_dir):
        # Generated files are named "<stem>-<trash_id>.yml"; match the exact
        # suffix instead of a loose substring, which could hit the wrong file
        # if one trash_id happens to be a substring of a file stem.
        if fname.endswith(f"-{trash_id}.yml"):
            target_file = os.path.join(custom_formats_dir, fname)
            break

    if not target_file or not os.path.exists(target_file):
        print(f"Custom format with trash_id {trash_id} not found in {custom_formats_dir}")
        return 0

    with open(target_file, 'r', encoding='utf-8') as f:
        data = yaml.safe_load(f)

    if not data or 'trash_id' not in data:
        print(f"Invalid custom format data for {custom_format_name}")
        return 0

    if data['trash_id'] != trash_id:
        # BUGFIX: the old message claimed the id was "not found in
        # trash_score_set", but this branch means the file's contents
        # disagree with the id its name matched on.
        print(f"Trash ID mismatch for custom format {custom_format_name}: "
              f"expected {trash_id}, found {data['trash_id']}")
        return 0

    trash_scores = data.get('trash_scores', {})
    if not trash_scores:
        print(f"No trash scores found in {custom_format_name}")
        return 0

    return trash_scores.get(trash_score_set, trash_scores.get('default', 0))
|
||||
|
||||
|
||||
def collect_profile_formats(trash_score_set, format_items, output_dir):
    """Build a profile's custom_formats list, dropping zero-scored entries.

    *format_items* maps custom-format names to their trash_ids; each entry
    is scored via find_score_for_custom_format().
    """
    scored = []
    for name, trash_id in format_items.items():
        score = find_score_for_custom_format(trash_score_set, name, trash_id, output_dir)
        # A zero score means "not relevant for this profile" — leave it out.
        if score:
            scored.append({'name': name, 'score': score})
    return scored
|
||||
|
||||
|
||||
def collect_qualities(items):
    """Translate *arr profile quality items into this repo's quality schema.

    Allowed leaf qualities receive sequential positive ids; quality groups
    receive descending negative ids and list their members (which consume
    positive ids) under 'qualities'. Disallowed items are omitted.
    """
    qualities = []
    next_id = 1        # positive ids for individual qualities
    next_group_id = -1  # negative ids mark quality groups
    for item in items:
        if item.get('allowed', False) is False:
            continue

        entry = {'name': item.get('name', '')}
        members = item.get('items')
        if members is not None:
            # A quality group: negative id, members enumerated inside.
            entry['id'] = next_group_id
            next_group_id -= 1
            entry['description'] = ''
            entry['qualities'] = []
            for member in members:
                entry['qualities'].append({'id': next_id, 'name': member})
                next_id += 1
        else:
            entry['id'] = next_id
            next_id += 1
        qualities.append(entry)

    return qualities
|
||||
|
||||
def collect_profile(service, file_name, input_json, output_dir):
    """Convert one TRaSH-Guides quality-profile JSON into this repo's YAML schema.

    Writes <output_dir>/<file_name>-<trash_id>.yml with qualities and
    scored custom formats resolved from the already-generated files.
    """
    name = input_json.get('name', '')
    trash_id = input_json.get('trash_id', '')
    yml_data = {
        'name': name,
        'description': f"""Profile from TRaSH-Guides.
{input_json.get('trash_description', '')}""",
        'trash_id': trash_id,
        'tags': [],
        'upgradesAllowed': input_json.get('upgradeAllowed', True),
        'minCustomFormatScore': input_json.get('minFormatScore', 0),
        'upgradeUntilScore': input_json.get('cutoffFormatScore', 0),
        'minScoreIncrement': input_json.get('minUpgradeFormatScore', 0),
        'qualities': collect_qualities(input_json.get('items', [])),
        'custom_formats': collect_profile_formats(
            input_json.get('trash_score_set'),
            input_json.get('formatItems', {}),
            output_dir),
        'language': input_json.get('language', 'any').lower(),
    }

    destination = os.path.join(output_dir, f"{file_name}-{trash_id}.yml")
    with open(destination, 'w', encoding='utf-8') as handle:
        yaml.dump(yml_data, handle, sort_keys=False, allow_unicode=True)
    print(f"Generated: {destination}")
|
||||
|
||||
def collect_profiles(service, input_dir, output_dir):
    """Walk *input_dir* recursively and convert every profile JSON file."""
    for root, _, files in os.walk(input_dir):
        for filename in files:
            if not filename.endswith('.json'):
                continue
            # The file stem (no extension) becomes part of the output name.
            stem = os.path.splitext(filename)[0]
            with open(os.path.join(root, filename), 'r', encoding='utf-8') as handle:
                data = json.load(handle)
            collect_profile(service, stem, data, output_dir)
|
||||
43
scripts/utils/regex_patterns.py
Normal file
43
scripts/utils/regex_patterns.py
Normal file
|
|
@ -0,0 +1,43 @@
|
|||
import os
|
||||
import json
|
||||
import yaml
|
||||
|
||||
def collect_regex_pattern(service, file_name, input_json, output_dir):
    """Extract the first release-title/group regex from a TRaSH custom format.

    Writes <output_dir>/<file_name>.yml containing the pattern, or nothing
    at all when the input has no usable pattern.
    """
    # Find the first pattern in specifications.
    pattern = None
    for spec in input_json.get('specifications', []):
        if spec.get('implementation') not in ['ReleaseTitleSpecification',
                                              'ReleaseGroupSpecification']:
            continue
        pattern = spec.get('fields', {}).get('value')
        if pattern:
            # Stop at the first non-empty pattern so later specifications
            # cannot overwrite the output file with a different value.
            break

    if not pattern:
        # No regex-bearing specification: don't emit a file with a null pattern.
        return

    # Compose YAML structure
    yml_data = {
        'name': input_json.get('name', ''),
        'pattern': pattern,
        'description': "Regex pattern from TRaSH-Guides.",
        'tags': [],
    }

    # Output path
    output_path = os.path.join(output_dir, f"{file_name}.yml")
    with open(output_path, 'w', encoding='utf-8') as f:
        yaml.dump(yml_data, f, sort_keys=False, allow_unicode=True)
    print(f"Generated: {output_path}")
|
||||
|
||||
|
||||
def collect_regex_patterns(service, input_dir, output_dir):
    """Walk *input_dir* recursively and extract a regex pattern from each JSON file."""
    for root, _, files in os.walk(input_dir):
        for filename in files:
            if not filename.endswith('.json'):
                continue
            # The file stem (no extension) becomes the output file name.
            stem = os.path.splitext(filename)[0]
            with open(os.path.join(root, filename), 'r', encoding='utf-8') as handle:
                data = json.load(handle)
            collect_regex_pattern(service, stem, data, output_dir)
|
||||
Loading…
Add table
Add a link
Reference in a new issue