Ensure publishing is done correctly, and add a pause between publishes to prevent the 'TOO_MANY_UNCONFIRMED' error.

This commit is contained in:
crowetic 2025-04-15 19:30:35 -07:00
parent 05793f26e1
commit efa1e821fa

View File

@ -18,6 +18,8 @@ SIZE_LIMIT_MB = 410
# Default video codec passed to the re-encode step (ffmpeg codec name).
DEFAULT_CODEC = 'h264'
# Whether to use NVIDIA hardware encoding by default — presumably NVENC; TODO confirm in reencode_video.
DEFAULT_USE_NVIDIA = False
# File that log() appends progress/error messages to.
LOG_FILE = "video-encode-and-publish.log"
# Number of videos published per batch before pausing.
BATCH_SIZE = 5
# Seconds to sleep between batches so the node can confirm pending
# transactions (prevents the 'TOO_MANY_UNCONFIRMED' error).
WAIT_SECONDS = 120
# Logging
def log(msg):
@ -94,6 +96,8 @@ def generate_all_thumbnails(video_path, duration):
thumbnails.append(f"data:image/webp;base64,{base64_encode_file(thumb_path)}")
return thumbnails
def base64_encode_file(path):
    """Read the file at *path* and return its contents base64-encoded as text."""
    with open(path, 'rb') as fh:
        raw = fh.read()
    encoded = base64.b64encode(raw)
    return encoded.decode('utf-8')
@ -118,6 +122,34 @@ def get_api_key():
return input("Enter your Qortal API key: ").strip()
def generate_identifier_from_filename(filepath):
    """Build the base QDN identifier ("qtube_vid_<slug>") from a video's filename."""
    filename = os.path.basename(filepath)
    stem, _ext = os.path.splitext(filename)
    return f"qtube_vid_{slugify(stem)}"
def check_existing_identifier(base_identifier, channel_name):
    """Look up an already-published QDN identifier for this video.

    Queries the node's simple-search endpoint for all DOCUMENT resources
    published under *channel_name* and returns the first identifier that
    starts with *base_identifier*, with any "_metadata" suffix stripped.
    Returns None when nothing matches or the lookup fails — callers then
    mint a fresh identifier, so this is deliberately best-effort.
    """
    try:
        response = requests.get(
            f"{API_URL}/arbitrary/resources/searchsimple",
            params={
                "service": "DOCUMENT",
                "name": channel_name,
                "limit": 0,          # presumably 0 = unlimited — TODO confirm against node API
                "reverse": "true",   # newest entries first
            },
            headers={"accept": "application/json"},
            # Fix: without a timeout a stalled node hangs the whole run forever.
            # A timeout raises requests.Timeout, which the except below logs.
            timeout=30,
        )
        response.raise_for_status()
        for entry in response.json():
            # .get() so one malformed entry is skipped instead of aborting the scan.
            identifier = entry.get("identifier", "")
            if identifier.startswith(base_identifier):
                return identifier.replace("_metadata", "")
    except Exception as e:
        log(f"[WARNING] Could not check existing identifiers: {e}")
    return None
def build_sign_publish_from_file(service, identifier, name, file_path, private_key, dry_run=False, metadata={}):
"""
Publishes a file to QDN with metadata and signs it locally.
@ -372,9 +404,16 @@ def publish_qtube(video_path, private_key, mode='auto', dry_run=False):
if use_auto != 'yes':
title, fullDescription, htmlDescription, category, codec, use_nvidia = prompt_for_metadata()
slug = slugify(title)
base_slug = f"qtube_vid_{slug}"
identifier = None
base_identifier = generate_identifier_from_filename(video_path)
existing = check_existing_identifier(base_identifier, name)
if existing:
identifier = existing
log(f"[INFO] Reusing existing identifier: {identifier}")
else:
short_id = generate_short_id()
identifier = f"{base_identifier}_{short_id}"
log(f"[INFO] Using new identifier: {identifier}")
short_id = None
try:
@ -452,6 +491,24 @@ def publish_qtube(video_path, private_key, mode='auto', dry_run=False):
log(f"Publishing METADATA: {metadata_identifier}")
build_sign_publish_from_file("DOCUMENT", metadata_identifier, name, metadata_file_path, private_key, dry_run)
def process_video_batch(video_paths, private_key, mode='auto', dry_run=False):
    """Publish *video_paths* in groups of BATCH_SIZE, pausing between groups.

    The pause (inside process_and_wait) gives the node time to confirm
    transactions between batches.
    """
    pending = []
    for path in video_paths:
        pending.append(path)
        if len(pending) >= BATCH_SIZE:
            process_and_wait(pending, private_key, mode, dry_run)
            pending = []
    # Flush a final partial batch, if any.
    if pending:
        process_and_wait(pending, private_key, mode, dry_run)
def process_and_wait(batch, private_key, mode, dry_run):
    """Publish every video in *batch*, then sleep WAIT_SECONDS.

    A failure on one video is logged and does not stop the rest of the batch.
    The sleep throttles publishing so unconfirmed transactions can clear.
    """
    for video_path in batch:
        try:
            publish_qtube(video_path, private_key, mode=mode, dry_run=dry_run)
        except Exception as e:
            log(f"[ERROR] Failed to publish {video_path}: {e}")
    log(f"[WAIT] Waiting {WAIT_SECONDS} seconds before next batch...")
    time.sleep(WAIT_SECONDS)
def main():
parser = argparse.ArgumentParser()
@ -460,9 +517,9 @@ def main():
args = parser.parse_args()
private_key = get_private_key()
video_paths = []
for root, dirs, files in os.walk(os.getcwd()):
# Skip folders we don't want to process
if 'ORIGINALS' in dirs:
dirs.remove('ORIGINALS')
if 'too_large' in dirs:
@ -476,7 +533,6 @@ def main():
try:
full_path = os.path.join(root, file)
# Per-folder QDN.json check
if not qdn_template_cache:
metadata_path = os.path.join(root, 'QDN.json')
if os.path.exists(metadata_path):
@ -493,16 +549,13 @@ def main():
if should_reencode(full_path):
reencoded_path = reencode_video(full_path, codec=codec, use_nvidia=use_nvidia)
if reencoded_path:
publish_qtube(reencoded_path, private_key, mode=args.mode, dry_run=args.dry_run)
video_paths.append(reencoded_path)
else:
publish_qtube(full_path, private_key, mode=args.mode, dry_run=args.dry_run)
video_paths.append(full_path)
except Exception as e:
log(f"Failed to process {file}: {e}")
process_video_batch(video_paths, private_key, mode=args.mode, dry_run=args.dry_run)
# Script entry point: run the encode-and-publish pipeline when executed directly.
if __name__ == "__main__":
    main()