import logging
from datetime import datetime
from urllib.request import urlopen

from scrapyd_api import ScrapydAPI

from .models import Job


class Service:
    """Service layer that drives crawl ``Job`` records through Scrapyd.

    Job statuses used here: ``PEN`` (pending), ``RUN`` (running),
    ``FIN`` (finished), ``DEL`` (deleted).
    """

    # URL template for fetching a spider's log file from the Scrapyd web UI.
    # NOTE(review): host/port hard-coded to the local Scrapyd default — consider
    # moving to settings if deployments differ.
    LOG_URL = 'http://127.0.0.1:6800/logs/{project}/{spider}/{job}.log'

    def __init__(self, project='exa'):
        """Create the Scrapyd API client.

        Args:
            project: Scrapyd project name. Defaults to ``'exa'`` for
                backward compatibility with existing callers.
        """
        self.project = project
        self.api = ScrapydAPI()

    def update_jobs_status(self):
        """Sync jobs that Scrapyd reports as finished into the Job table.

        Best-effort: any failure (Scrapyd unreachable, unexpected payload,
        DB error) is logged and swallowed so a periodic/polling caller
        keeps running — mirrors the original silent-pass behavior, but no
        longer hides the error entirely and no longer traps
        ``KeyboardInterrupt``/``SystemExit`` via a bare ``except``.
        """
        try:
            jobs = self.api.list_jobs(self.project)
            for entry in jobs.get('finished') or ():
                # Only transition jobs we still believe are running.
                Job.objects.filter(job_uuid=entry['id'], status='RUN').update(
                    status='FIN', end_time=entry['end_time'])
        except Exception:
            logging.getLogger(__name__).warning(
                'Failed to update job statuses from Scrapyd', exc_info=True)

    def _schedule(self, job):
        """Schedule *job*'s spider on Scrapyd and mark the record running.

        Mutates ``job`` in place (uuid, start_time, status); caller saves.
        """
        job.job_uuid = self.api.schedule(
            self.project, job.spider.sp_name, query=job.query)
        # NOTE(review): naive local time, matching the rest of this module —
        # consider timezone-aware datetimes (django.utils.timezone.now).
        job.start_time = datetime.now()
        job.status = 'RUN'

    def run_job(self, job):
        """Schedule the job with pk *job* on Scrapyd; return its JSON form."""
        job = Job.objects.get(pk=job)
        self._schedule(job)
        job.save()
        return job.json()

    def cancel_job(self, job):
        """Cancel the running job with pk *job*; return its JSON form."""
        job = Job.objects.get(pk=job)
        self.api.cancel(self.project, job.job_uuid)
        job.end_time = datetime.now()
        job.status = 'FIN'
        job.save()
        return job.json()

    def delete_job(self, job):
        """Soft-delete the job with pk *job* (status only, row is kept)."""
        job = Job.objects.get(pk=job)
        job.status = 'DEL'
        job.save()
        return job.json()

    def activate_job(self, job):
        """Reset the job with pk *job* back to pending; return its JSON form."""
        job = Job.objects.get(pk=job)
        job.start_time = None
        job.end_time = None
        job.status = 'PEN'
        job.save()
        return job.json()

    def restart_job(self, job):
        """Re-schedule the job with pk *job*, clearing any previous end time."""
        job = Job.objects.get(pk=job)
        self._schedule(job)
        job.end_time = None
        job.save()
        return job.json()

    def get_log(self, job):
        """Fetch and return the raw Scrapyd log (bytes) for the job with pk *job*.

        Raises:
            urllib.error.URLError: if the Scrapyd log endpoint is unreachable.
        """
        job = Job.objects.get(pk=job)
        url = self.LOG_URL.format(
            project=self.project, spider=job.spider.sp_name, job=job.job_uuid)
        # Close the HTTP response deterministically (the original leaked it).
        with urlopen(url) as response:
            return response.read()