文件分类本地存取

# coding=utf-8
from __future__ import print_function
import paramiko
import time
import os

# init a sftp connection
def sftp_connect(ip, port, usr, pw):
    """Open an SFTP session to ``ip:port`` using password authentication.

    Returns a ``paramiko.SFTPClient``; the caller owns the session and is
    responsible for closing it (closing the client tears down the transport).
    """
    transport = paramiko.Transport(ip, port)
    transport.connect(username=usr, password=pw)
    return paramiko.SFTPClient.from_transport(transport)

def writefile(ftxt, content):
    """Write *content* to the file at path *ftxt*, overwriting any existing data.

    Bug fix: the original used fullwidth quotes (``‘w‘``) around the mode
    string, which is a SyntaxError in Python.
    """
    with open(ftxt, 'w') as f:
        f.write(content)

def getdiferent(l1, l2):
    """Return the symmetric difference of two lists, preserving order.

    Items of *l1* not present in *l2* come first, followed by items of *l2*
    not present in *l1*. Returns an empty list when the lists are equal.

    Fixes over the original:
    - no longer accumulates into the module-level global ``tmplist`` (hidden
      state that callers had to remember to reset before every call);
    - membership tests use sets (O(1)) instead of list scans (O(n) each),
      turning the quadratic comparison into linear time;
    - removed dead commented-out file-logging code and fullwidth quotes.
    """
    diff = []
    if l1 != l2:
        print('get different')
        in_l1, in_l2 = set(l1), set(l2)
        # Remote-only entries first, then local-only entries (original order).
        diff.extend(x for x in l1 if x not in in_l2)
        diff.extend(y for y in l2 if y not in in_l1)
    return diff

def listdir(job_dir):
    """Return the entry names of remote directory *job_dir*.

    Relies on the module-level ``sftp1`` session created in ``__main__``.
    """
    return sftp1.listdir(job_dir)

def waiteseconds(sec):
    """Count down from *sec* to 1, printing each value and sleeping 1s per step.

    A non-positive *sec* prints nothing and returns immediately.
    """
    remaining = sec
    while remaining > 0:
        print(remaining)
        time.sleep(1)
        remaining -= 1

def classifylog(job_dir, list1, list2):
    """Download ``harness.log`` for every case that differs between remote and local.

    Parameters:
        job_dir: remote job folder (e.g. ``/home/username/job/failed``); the
                 part after ``/job`` selects the matching local subfolder.
        list1:   remote case names.
        list2:   local case names.

    Uses module-level globals ``sftp1``, ``localfolder`` and ``logfolder``.
    A missing remote log (IOError from paramiko) is reported and skipped,
    not raised.

    Fixes over the original: fullwidth quotes around ``'/job'`` were a
    SyntaxError, and the local path was built as
    ``localfolder + "\\" + "/failed"`` (mixed separators); paths are now
    assembled with ``os.path.join``.
    """
    print("remotelist and locallist is: ", len(list1), len(list2))
    # compare 2 lists and get the different
    different_list = getdiferent(list1, list2)
    print("different_list is :", different_list, len(different_list))

    if not different_list:
        print("list1 == list2")
        return

    print("list1 != list2")
    # 'failed' / 'aborted' / 'complete' — the job_dir component after '/job'.
    subdir = job_dir.split('/job')[1].lstrip('/')
    local_subdir = os.path.join(localfolder, subdir)

    for casename in different_list:
        print("---------------------------")
        print("case: ", casename)

        # new case folder
        os.chdir(local_subdir)
        if casename not in os.listdir(local_subdir):
            os.mkdir(casename)
        localfile = os.path.join(os.getcwd(), casename, "harness.log")

        # download log to local folder
        logdir = logfolder + "/" + casename
        try:
            sftp1.chdir(logdir)
            remotefile = sftp1.getcwd() + "/" + "harness.log"
            print("remotefile: " + remotefile)
            sftp1.get(remotefile, localfile)
            print("harness log download to localfile completed!")
        except IOError as e:
            print(e)
            print("No log exists for this case:", casename)

if __name__ == '__main__':
    # --- configuration -----------------------------------------------------
    tmplist = []  # kept for backward compatibility; getdiferent no longer uses it
    remote_ip = "x.x.x.x"
    # NOTE(review): 21 is the plain-FTP port; SFTP normally listens on 22 — confirm.
    sftp_port = 21
    username = "username"
    password = "password"
    faileddir = "/home/username/job/failed"
    aborteddir = "/home/username/job/aborted"
    completedir = "/home/username/job/complete"
    localfolder = "D:\\Study\\python\\log"
    logfolder = "/home/username/log"

    # --- create the local category folders if missing ----------------------
    os.chdir(localfolder)
    folder_list = os.listdir(localfolder)
    for sub in ("failed", "complete", "aborted"):
        if sub not in folder_list:
            os.mkdir(sub)

    local_failed_dir = os.path.join(localfolder, "failed")
    local_complete_dir = os.path.join(localfolder, "complete")
    local_aborted_dir = os.path.join(localfolder, "aborted")

    # --- open the SFTP session (used globally by listdir/classifylog) ------
    print("sftp connecting ... please wait...")
    sftp1 = sftp_connect(remote_ip, sftp_port, username, password)
    print("sftp connection completed")

    def _remote_cases(remote_dir):
        # Strip the trailing '.xml' from each remote entry to get bare case names.
        return [name.split('.xml')[0] for name in listdir(remote_dir)]

    # --- snapshot remote failed/aborted/complete folders -------------------
    remote_failed_list = _remote_cases(faileddir)
    print("remote_failed_list: ", len(remote_failed_list), remote_failed_list)

    remote_aborted_list = _remote_cases(aborteddir)
    print("remote_aborted_list: ", len(remote_aborted_list), remote_aborted_list)

    remote_complete_list = _remote_cases(completedir)
    print("remote_complete_list: ", len(remote_complete_list), remote_complete_list)

    # --- snapshot local failed/aborted/complete folders --------------------
    local_failed_list = os.listdir(local_failed_dir)
    print("local_failed_list: ", local_failed_list)
    local_aborted_list = os.listdir(local_aborted_dir)
    local_complete_list = os.listdir(local_complete_dir)

    # ==========classifylog==========
    print("--------1 checking 'failed'------will start in 5 seconds------")
    waiteseconds(5)
    classifylog(faileddir, remote_failed_list, local_failed_list)
    local_failed_list = remote_failed_list
    print("failed log check completed")

    print("--------2 checking 'aborted'-----will start in 5 seconds---------")
    waiteseconds(5)
    classifylog(aborteddir, remote_aborted_list, local_aborted_list)
    local_aborted_list = remote_aborted_list
    print("aborted log check completed")

    print("--------3 checking 'complete'------will start in 5 seconds--------")
    waiteseconds(5)
    classifylog(completedir, remote_complete_list, local_complete_list)
    local_complete_list = remote_complete_list
    print("complete log check completed")

    print("close sftp connection")
    sftp1.close()

资料:

文件读写:http://www.cnblogs.com/ymjyqsx/p/6554817.html

异常处理:http://www.cnblogs.com/Lival/p/6203111.html

os文件操作:http://www.jb51.net/article/59901.htm

时间: 2024-11-07 21:18:50

文件分类本地存取的相关文章

js 保存文件到本地

原文http://blog.163.com/[email protected]/blog/static/87727415201310975054613/ js 保存文件到本地 2013-11-09 19:56:35| 分类: 默认分类 |举报|字号 订阅 var obj_target = document.createElementNS('http://www.w3.org/1999/xhtml', 'a'); if(obj_target)//非ie { obj_target.href = ‘1

给大家分享web开发新手修改hosts文件实现本地域名访问的正确方法

1.如何正确修改hosts文件: 一般打开hosts文件里面都会有个示例,按照其格式修改即可 比如以下内容: # For example: # # 102.54.94.97 rhino.acme.com # source server # 38.25.63.10 x.acme.com # x client host 即代表打开rhino.acme.com这个网址将解析到102.54.94.97,ip地址与网址间至少有一空格,当然建议通过按Table键来编辑,即美观又不容易编写失误;这也就是通过解

Open SSH 登陆远程主机,怎样下载文件到本地?

许多人使用简易的SSH连接工具,有时候需要在SSH下复制文件到本地查看比较方便,这就用到了SCP命令. scp是有Security的文件copy,基于ssh登录,操作起来比较方便. 比如要把当前一个文件copy到远程另外一台主机上,可以如下命令. <span style="font-size:14px;">scp /home/example.tar.gz [email protected] host IP:/home/root</span> 如果想反过来操作,把

文件分类总结

文件太乱了,总结整理方法: 1分类的层级按照树形结构,一个分类对应一个文件夹 2所有的分类不能重复 3所有同层等级内的文件夹按照相似或相同的目录进行分类,避免混乱,统一整理 4下载文件前先创建对应文件的临时分类,然后下载至该临时分类,文件使用完成后进行删减,然后将留下来有用的放置到总树目录下,最后删除临时分类 5已经分类好的分类不能多也不能少,除非你发现该分类有问题 6下班关机将各文件各就各位,每天的目录结构必须不变,变得只是分类中的文件数量 7千万不要创建垃圾分类,分类必须在自己的掌控下进行创

HDFS文件与本地文件的交互操作

1.在HDFS中创建一个新的文件夹,用于保存weblog_entries.txt hadoop fs -mkdir /data/weblogs 2.将weblog_entries.txt文件从本地文件系统复制到HDFS刚创建的新文件夹下 cd /home/data hadoop fs -copyFromLocal weblog_entries.txt /data/weblogs 3.列出HDFS上weblog_entries.txt文件的信息: hadoop fs –ls /data/weblo

SSH连接下复制远程linux服务器文件到本地的命令(zz)

原文链接 许多人使用简易的SSH连接工具,有时候需要在SSH下复制文件到本地查看比较方便,我给大家介绍一个简单的命令SCP. scp是有Security的文件copy,基于ssh登录.操作起来比较方便,比如要把当前一个文件copy到远程另外一台主机上,可以如下命令. scp /home/daisy/full.tar.gz [email protected]:/home/root 然后会提示你输入另外那台172.19.2.75主机的root用户的登录密码,接着就开始copy了. 如果想反过来操作,

PHP CURL实现远程下载文件到本地

<?php //$result=httpcopy('http://www.phpernote.com/image/logo.gif'); echo '<pre>';print_r($result); function httpcopy($url,$file='',$timeout=60){ $file=empty($file)?pathinfo($url,PATHINFO_BASENAME):$file; $dir=pathinfo($file,PATHINFO_DIRNAME); !i

根据txt文件中指定的文件名进行文件分类

根据txt文件中指定的文件名进行文件分类: // 根据txt文件中指定的文件名进行文件分类 #include <stdio.h> #include <stdlib.h> #include <malloc.h> #include <windows.h> #include <io.h> #include <direct.h> #include <string.h> void Remove_End_Char(char a[])

大开测试:性能- 如何下载并保存文件到本地(连载18)

7.18  如何下载并保存文件到本地 1.问题提出 如何下载并保存文件到本地? 2.问题解答 一个人事管理系统项目一般都要实现能够上传和下载电子文件(如学位照.身份证.护照或者其他Word.Excel.Pdf等格式的电子文件),测试时为了模拟下载的场景,需要编写相关脚本.在HTTP中,没有任何一个方法或是动作能够标识“下载文件”这个动作,对HTTP来说,无论是下载文件或者请求页面,都只是发出一个GET请求,LoadRunner记录了客户端发出的对文件的请求,并能够收到文件内容.因此,完全可以通过