def newDLtoAdd(url, found_id, filename, found_season, found_episode, dl_dir):
    """Create a queued download Job for *url*, log it, and attach the log
    to the matching autoDLEntry.

    Parameters:
        url           -- full URL of the file to download
        found_id      -- primary key of the autoDLEntry to update
        filename      -- name to store the download under (str or unicode)
        found_season  -- season number (coerced to int)
        found_episode -- episode number (coerced to int)
        dl_dir        -- local directory the file will be saved into
    """
    size = common.getContentLength(url)
    # Python 2: unicode() raises TypeError when filename is already a
    # unicode object — in that case just use it untouched.
    try:
        filename = unicode(filename, errors='ignore')
    except TypeError:
        pass
    # Create the new Job, appended to the end of the queue.
    new_job = Job()
    new_job.status = 'Queued'
    new_job.queue_id = len(Job.objects.all())
    new_job.process_pid = -1
    new_job.dl_speed = 0
    new_job.time_seg_start = -1
    new_job.time_seg_end = -1
    new_job.display_size = common.convert_bytes(size)
    new_job.total_size = size
    new_job.dled_size = 0
    new_job.full_url = url
    new_job.local_directory = dl_dir
    new_job.filename = filename
    new_job.notes = "Auto DLed:  " + new_job.local_directory + filename
    new_job.progress = 0
    new_job.eta = ""
    new_job.save()

    # Record the automatic download in the history log.
    new_log = log()
    new_log.notes = 'Auto DLed'
    new_log.ts = datetime.now()
    new_log.season_num = int(found_season)
    new_log.episode_num = int(found_episode)
    new_log.save()

    # Link the log to the autoDLEntry that triggered this download.
    tobe_updated = autoDLEntry.objects.get(id=found_id)
    tobe_updated.logs.add(new_log)
# Example 2
def newDLtoAdd(url, found_id, filename, found_season, found_episode, dl_dir,
               size):
    """Queue a new download Job of *size* bytes for *url* and record it
    on the autoDLEntry identified by *found_id*.
    """
    # Decode byte strings; unicode() raises TypeError for objects that
    # are already unicode (Python 2), in which case keep them as-is.
    try:
        filename = unicode(filename, errors='ignore')
    except TypeError:
        pass

    # Build the Job while holding the manager lock so the queue_id stays
    # consistent; unlock is guaranteed by the finally clause.
    try:
        Job.objects.lock()
        new_job = Job()
        field_values = {
            'status': 'Queued',
            'queue_id': len(Job.objects.all()),
            'process_pid': -1,
            'gid': -1,
            'dl_speed': 0,
            'time_seg_start': -1,
            'time_seg_end': -1,
            'display_size': common.convert_bytes(size),
            'total_size': size,
            'dled_size': 0,
            'full_url': url,
            'local_directory': dl_dir,
            'filename': common.name_wrapper(filename),
            'progress': 0,
            'eta': "",
        }
        for field, value in field_values.items():
            setattr(new_job, field, value)
        new_job.notes = "Auto DLed:  " + new_job.local_directory + new_job.filename
        new_job.save()
    finally:
        Job.objects.unlock()

    # Record the automatic download in the history log.
    new_log = log()
    new_log.notes = 'Auto DLed'
    new_log.ts = datetime.now()
    new_log.season_num = int(found_season)
    new_log.episode_num = int(found_episode)
    new_log.save()

    # Attach the log entry to the autoDLEntry that triggered the download.
    matched_entry = autoDLEntry.objects.get(id=found_id)
    matched_entry.logs.add(new_log)
# Example 3
def file_stats(id):
    """Refresh the stored size of Job *id* and normalise its status.

    Re-probes the job's URL for the current content size, then rewrites
    transitional status values ('...Queue'/'...Stop'/'...Start') to their
    steady-state display strings.

    NOTE: the parameter name *id* shadows the builtin but is kept for
    backward compatibility with existing callers.
    """
    a_job = Job.objects.get(id=int(id))
    # Truthiness instead of '== True' comparison.
    if a_job.autorename:
        rename(a_job)

    log_to_file("Retrieving stats for: " + a_job.filename)
    # Only the size is used here; the probed status/filename are ignored.
    _status, _filename, size = common.getEntryInfo(a_job.full_url)
    try:
        Job.objects.lock()
        a_job.total_size = size
        a_job.display_size = common.convert_bytes(a_job.total_size)

        log_to_file("Updating status for: " + a_job.filename)
        if a_job.status.endswith('Queue'):
            a_job.status = 'Queued'
        elif a_job.status.endswith('Stop'):
            a_job.status = 'Stopped'
        elif a_job.status.endswith('Start'):
            a_job.status = 'Starting...'
        a_job.save()
    finally:
        Job.objects.unlock()
    log_to_file("Finished with for: " + a_job.filename)
# Example 4
            status = 'Queued'
        elif status.endswith('Stop'):
            status = 'Stopped'
        elif status.endswith('Start'):
            status = 'Starting...'
        
        #filename = unicode(filename, errors='ignore')
        new_job.autorename = autoRename
        new_job.status = status
        new_job.queue_id = len(Job.objects.all())
        new_job.gid = -1
        new_job.process_pid = -1
        new_job.dl_speed = 0
        new_job.time_seg_start = -1
        new_job.time_seg_end = -1
        new_job.display_size = common.convert_bytes(size)
        new_job.total_size = size
        new_job.dled_size = 0
        #new_job.dled_dif_size = 0; removed
        new_job.full_url = full_dl_path
        new_job.local_directory = local_dir
        new_job.filename = common.name_wrapper(filename)
        new_job.notes = "CURL download: " + new_job.local_directory + new_job.filename
        new_job.progress = 0;
        new_job.eta = "";
        new_job.save()
    myparser.close()
    
def fixEntriesAfter(fix_queue_id):
    for a_job in Job.objects.all().order_by('queue_id'):
        if a_job.queue_id > fix_queue_id:
# Example 5
from django.core.management.base import BaseCommand, CommandError
from bitflux.engine.models import autoDLEntry
from bitflux.engine.models import Job
from bitflux.engine.models import UserProfile
from bitflux.engine.models import deamon
from bitflux.engine.models import autoDLer
from bitflux.engine.models import autoDLEntry
from bitflux.engine.models import log
from bitflux.engine.management.commands import common
from django.conf import settings
import subprocess
import time
import datetime
import os
import sys
import ctypes
import time
import signal
from datetime import timedelta
import re
import threading, thread
import httplib
import urllib
import xmlrpclib
import random, string
import traceback
try:
    import json
except ImportError:
    import simplejson as json