Script não executado com cron [closed]

2

eu tenho um script python; quando eu lanço meu script manualmente tudo está ok, mas quando eu lanço meu script sob o crontab

eu tenho este erro:

.. 2015-04-24 14:36:02,163 ERROR Problème dans le module importData[Errno 2] No such file or directory: '/opt/scripts/stockvo.json' ..

Meu script .py:

#!/usr/bin/env python
# -*- coding: latin-1 -*-

def moveFTPFiles(serverName, userName, passWord, remotePath, localPath,
                 deleteRemoteFiles=False, onlyDiff=False):
    """Connect to an FTP server and bring down files to a local directory.

    Parameters:
        serverName        -- FTP host to connect to
        userName, passWord -- FTP credentials
        remotePath        -- remote directory to fetch files from
        localPath         -- local directory the files are written to
                             (it is emptied first)
        deleteRemoteFiles -- when True, delete each remote file after download
        onlyDiff          -- when True, fetch only files not already present
                             locally

    NOTE(review): relies on the module-level `logger` configured in the
    __main__ section, and on the sibling `timeStamp()` helper.
    """
    import os
    import glob
    import ftplib

    # BUG FIX: the original mixed the global `localDirectoryPath` with the
    # `localPath` parameter; __main__ passes the same value for both, so the
    # parameter is used consistently here.
    logger.info(' Suppressions des anciennes photos du repertoire: ' + localPath)

    # Wipe the previous contents of the local directory.
    os.chdir(localPath)
    for oldFile in glob.glob('*.*'):
        os.unlink(oldFile)

    ftp = None
    try:
        ftp = ftplib.FTP(serverName)
        ftp.login(userName, passWord)
        ftp.cwd(remotePath)
        logger.info(' Connexion au serveur ' + serverName)
        logger.info(' Téléchargement des photos depuis ' + serverName + ' vers le repertoire ' + localPath)
        if onlyDiff:
            # Transfer only files missing locally. The builtin `set` replaces
            # the deprecated `sets.Set` (module removed in Python 3).
            lFileSet = set(os.listdir(localPath))
            rFileSet = set(ftp.nlst())
            transferList = list(rFileSet - lFileSet)
            logger.info(' Nombres de photos à récuperer ' + str(len(transferList)))
        else:
            transferList = ftp.nlst()

        delMsg = ""
        filesMoved = 0
        for fl in transferList:
            # Build the full local file path for this remote file.
            localFile = os.path.join(localPath, fl)
            # Download the file a chunk at a time using RETR; `with` closes
            # the handle even if the transfer raises.
            with open(localFile, 'wb') as fileObj:
                ftp.retrbinary('RETR ' + fl, fileObj.write)
            filesMoved += 1

            # Delete the remote file if requested.
            if deleteRemoteFiles:
                ftp.delete(fl)
                delMsg = " and Deleted"

        logger.info(' Nombre de photos récupérées' + delMsg + ': ' + str(filesMoved) + ' le ' + timeStamp())
    except ftplib.all_errors as e:
        logger.error(' Problème dans le module moveFTPFiles' + '%s' % e)
    finally:
        # BUG FIX: the original `ftp.close()` raised NameError when FTP()
        # itself failed; only close a connection that was actually created.
        if ftp is not None:
            ftp.close()
            ftp = None

def timeStamp():
    """Return the current local date/time as a formatted string."""
    from time import strftime
    stamp_format = "%a %d %b %Y %I:%M:%S %p"
    return str(strftime(stamp_format))

def importData(serverName, userName, passWord, directory, filematch, source, destination):
    """Download stock XML files over FTP, convert to JSON and import them
    into MongoDB.

    Parameters:
        serverName         -- FTP host to connect to
        userName, passWord -- FTP credentials
        directory          -- remote directory containing the data files
        filematch          -- glob pattern for the files to fetch (e.g. '*.xml')
        source             -- local working directory for downloads/conversion
                              (original code hard-coded '/opt/scripts/')
        destination        -- directory the final .json is moved to
                              (original code hard-coded '/opt/data/')

    NOTE(review): relies on the module-level `logger` configured in the
    __main__ section; requires the external `xml2json` and `mongoimport`
    commands on the PATH.
    """
    import ftplib
    import os
    import shutil
    import subprocess
    import json

    ftp = None
    filename = None
    try:
        ftp = ftplib.FTP(serverName)
        ftp.login(userName, passWord)
        ftp.cwd(directory)
        logger.info(' Connexion au serveur ' + serverName)
        # Loop through matching files and download each one individually.
        for filename in ftp.nlst(filematch):
            local_filename = os.path.join(source, filename)
            logger.info(' Téléchargement du fichier de données ' + filename)
            # `with` guarantees the handle is closed even if RETR fails.
            with open(local_filename, 'wb') as fhandle:
                ftp.retrbinary('RETR ' + filename, fhandle.write)

        # BUG FIX: the original crashed with an unbound `filename` below when
        # no remote file matched `filematch`.
        if filename is None:
            logger.error(' Problème dans le module importData' + 'aucun fichier ne correspond à %s' % filematch)
            return

        xml_path = os.path.join(source, 'stockvo.xml')
        json_path = os.path.join(source, 'stockvo.json')

        # Convert xml to json. Argv-list form avoids shell=True on a
        # string-built command.
        logger.info(' Conversion du fichier ' + filename + ' au format .json ')
        subprocess.call(['xml2json', '-t', 'xml2json',
                         '-o', json_path, xml_path, '--strip_text'])

        # Modify the json file: keep only the vehicle list.
        logger.info(' Modification du fichier .json')
        with open(json_path) as infile:
            data = json.load(infile)
        with open(json_path, "w") as outfile:
            json.dump(data["Stock"]["Vehicule"], outfile)

        # Move the json file. shutil.move replaces the shelled-out
        # `os.system('mv ...')`.
        logger.info(' Déplacement du fichier de données .json vers le répertoire /opt/scripts/')
        dest_json = os.path.join(destination, 'stockvo.json')
        shutil.move(json_path, dest_json)

        # Import the json file into MongoDB.
        logger.info(' Import du fichier .json vers la base MongoDB')
        p = subprocess.Popen(['mongoimport', '--db', 'AutoPrivilege', '-c', 'cars',
                              dest_json, '--jsonArray', '--upsert', '--drop'],
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = p.communicate()

        if stdout:
            logger.info(stdout)
        if stderr:
            logger.error(stderr)

        # Remove the downloaded xml file, if it exists.
        logger.info(' Suppression du fichier ' + filename)
        if os.path.isfile(xml_path):
            os.remove(xml_path)

    except ftplib.all_errors as e:
        logger.error(' Problème dans le module importData' + '%s' % e)
    finally:
        # BUG FIX: the original `ftp.close()` raised NameError when FTP()
        # itself failed; only close a connection that was actually created.
        if ftp is not None:
            ftp.close()
            ftp = None

import time
import datetime
import re
import os
import stat
import logging
import logging.handlers as handlers
import subprocess

class SizedTimedRotatingFileHandler(handlers.TimedRotatingFileHandler):
    """
    Logging handler that rotates on whichever condition triggers first:
    the current file growing past a size limit, or the timed interval
    elapsing.
    """
    def __init__(self, filename, mode='a', maxBytes=0, backupCount=0, encoding=None,
                 delay=0, when='h', interval=1, utc=False):
        # When rollover is enabled, only append mode makes sense: honouring
        # a 'w' mode would truncate the log (losing previous runs' entries)
        # every time the calling application starts.
        if maxBytes > 0:
            mode = 'a'
        handlers.TimedRotatingFileHandler.__init__(
            self, filename, when, interval, backupCount, encoding, delay, utc)
        self.maxBytes = maxBytes

    def shouldRollover(self, record):
        """
        Decide whether rollover should occur for this record.

        Returns 1 when appending the formatted record would push the file
        past the size limit, or when the scheduled rollover time has been
        reached; 0 otherwise.
        """
        if self.stream is None:                 # delay was set...
            self.stream = self._open()
        if self.maxBytes > 0:                   # size-based check enabled?
            formatted = "%s\n" % self.format(record)
            # Seek to EOF before tell(): works around a non-posix-compliant
            # Windows behaviour.
            self.stream.seek(0, 2)
            if self.stream.tell() + len(formatted) >= self.maxBytes:
                return 1
        # Fall through to the time-based check from the parent handler.
        now = int(time.time())
        if now >= self.rolloverAt:
            return 1
        return 0

if __name__ == '__main__':

    # Log to a file, rotating at 10 kB or every 60 s, keeping 5 backups.
    log_filename = '/opt/log/importData.log'
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    handler = SizedTimedRotatingFileHandler(
        log_filename, maxBytes=10000, backupCount=5,
        when='s', interval=60,
        # encoding='bz2',  # uncomment for bz2 compression
    )
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    # --- constant connection values
    # BUG FIX: this string literal was unterminated ("xxx.xx) in the original,
    # which made the whole script fail with a SyntaxError.
    ftpServerName = "xxx.xx"
    ftpU = "xxxx"
    ftpP = "xxxx"
    remoteDirectoryPath = "/xx/xxx/xxx/"
    localDirectoryPath = "/xx/xx/xx/xxxx/"

    directory = '/datas/'
    filematch = '*.xml'
    source = '/opt/scripts/'
    destination = '/opt/data/'
    start = time.time()
    logger.info('================================================')
    logger.info('================  DEBUT SCRIPT =================')
    logger.info('================================================')
    deleteAfterCopy = False     # set to True to clean out the remote directory
    onlyNewFiles = True         # set to True to grab only files missing locally
    importData(ftpServerName, ftpU, ftpP, directory, filematch, source, destination)
#   moveFTPFiles(ftpServerName,ftpU,ftpP,remoteDirectoryPath,localDirectoryPath,deleteAfterCopy,onlyNewFiles)
    elapsed_time = time.time() - start
    # BUG FIX: the original formatted the elapsed seconds with
    # strftime(localtime(elapsed_time)), i.e. as an epoch timestamp, and the
    # doubled quotes in 'Tps d''execution' silently concatenated to
    # "Tps dexecution". Report real minutes:seconds instead.
    minutes, seconds = divmod(int(elapsed_time), 60)
    logger.info('================================================')
    logger.info('================== FIN SCRIPT ==================')
    logger.info("======== Tps d'execution: %d:%02d minutes =========" % (minutes, seconds))
    logger.info('================================================')

Atualização:

adicionei meu diretório ao meu $PATH:

PATH=/usr/local/bin:/usr/bin:/bin:/usr/local/games:/usr/games:/opt/scripts

Será que eu esqueci alguma coisa (privilégios, ...)?

    
por Mercer 24.04.2015 / 14:44

1 resposta

3

Para resolver meu problema, adicionei essas linhas ao crontab

PATH=/usr/local/bin:/usr/bin:/bin:/usr/local/games:/usr/games:/opt/scripts 
0 4 * * * /opt/scripts/importData.py

obrigado a val0x00ff que me deu a solução

    
por 24.04.2015 / 16:34

Tags