Why does my Excel file take up so much space when there isn't much in it?


Problem description:

This afternoon Excel inexplicably slowed down while I was working on it, and closing several processes didn't help.

Later, when I saved the file, I found that something that should have been only a dozen megabytes had grown to over 100 MB. Can anyone tell me why?

Answer:

The workbook contains too many empty cells that Excel still counts as used. Cut the useful content into a new file, and avoid dragging the scroll bar endlessly downward or to the right, since that can leave large empty ranges marked as part of the sheet's used range.
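If you want to confirm that oversized used ranges are the cause before cleaning up, you can check each sheet's recorded dimensions. Below is a minimal sketch, assuming the workbook has been saved as .xlsx and that the openpyxl package is installed; the file name report.xlsx is only a placeholder:

# Sketch: spot sheets whose recorded "used range" is far larger than their real content.
# Assumes an .xlsx file and the openpyxl package; "report.xlsx" is a placeholder name.
from openpyxl import load_workbook

wb = load_workbook("report.xlsx", read_only=True)
for ws in wb.worksheets:
    # max_row/max_column reflect the recorded used range, not the visible data,
    # so huge values on a mostly empty sheet usually explain a bloated file.
    print(ws.title, ws.max_row, ws.max_column)

Sheets that report tens of thousands of rows or columns but hold little visible data are the usual culprits; deleting those empty rows/columns (or copying the real data into a new workbook) and re-saving shrinks the file.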

Finding the container each process belongs to from its process ID, and writing every process's memory usage to an Excel file

# coding=utf-8
import re
import os
import commands          # Python 2 only; the script targets a Python 2 environment
import json
import psutil
from pyExcelerator import *   # pyExcelerator writes legacy .xls workbooks


def execute(cmd):
    """Run a shell command and return its output, raising on a non-zero exit status."""
    status, output = commands.getstatusoutput(cmd)
    if status != 0:
        raise Exception("status is %s, output is %s" % (status, output))
    return output


def get_all_container_ids_name():
    """Map every PID running inside a container to that container's short id and name."""
    infos = execute("docker ps | awk '{print $1, $NF}'").split("\n")
    all_ids = {}
    regex = re.compile(r"\s+")
    for info in infos:
        info = info.strip()
        if not info:
            continue
        docker_id, docker_name = regex.split(info)
        short_id = docker_id.strip()
        if short_id.startswith("CON"):      # skip the "CONTAINER ID ... NAMES" header line
            continue
        full_id = execute("docker inspect -f '{{.Id}}' %s" % short_id)
        # read the runc state file to locate this container's cgroup paths
        state = execute("cat /run/runc/%s/state.json" % full_id)
        f = json.loads(state)
        cgroup_path = f["cgroup_paths"]["pids"]
        pids_path = os.path.join(cgroup_path, "cgroup.procs")
        ids = execute("cat %s" % pids_path).split("\n")
        for progress_id in ids:
            pr_id = progress_id.strip()
            all_ids[pr_id] = {"id": short_id, "name": docker_name}
    return all_ids


def get_process_info(p):
    """Return "pid,name,rss" for one psutil.Process, or None if it disappeared."""
    try:
        # cpu = int(p.cpu_percent(interval=1))
        rss = p.memory_info().rss
        name = p.name()   # note: names containing "," or spaces would break the simple splits below
        pid = p.pid
        return "%s,%s,%s\n" % (pid, name, rss)
    except Exception as e:
        print(e)


def get_all_process_info():
    """Dump the PID, name and resident memory (RSS) of every process to node_test.log."""
    node_name = os.path.join(os.path.dirname(os.path.abspath(__file__)), "node_test.log")
    instances = ""
    all_processes = list(psutil.process_iter())
    for proc in all_processes:
        ret = get_process_info(proc)
        if ret:
            instances += ret
    with open(node_name, "w") as fp:
        fp.writelines(instances)


def get_docker_name():
    """Join the process dump with the PID -> container map and write data.txt."""
    file_name = os.path.join(os.path.dirname(os.path.abspath(__file__)), "node_test.log")
    result_name = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data.txt")
    id_name_relation = get_all_container_ids_name()
    tmp = ""
    regex = re.compile(",")
    with open(file_name, "r") as fp:
        for progress_info in fp.readlines():
            fields = regex.split(progress_info.strip())
            progress_id, progress_name, mem = fields[0], fields[1], fields[2]
            if progress_id in id_name_relation:
                tmp += "%s %s %s %s %s\n" % (progress_id, id_name_relation[progress_id]["id"],
                                             id_name_relation[progress_id]["name"], progress_name, mem)
            else:
                tmp += "%s %s %s %s %s\n" % (progress_id, "sys_progress", "sys_progress", progress_name, mem)
    with open(result_name, "w") as fp:
        fp.writelines(tmp)


def ge_excel():
    """Convert data.txt into a spreadsheet (pyExcelerator produces legacy .xls files)."""
    file_name = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data.txt")
    result_name = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data.xls")
    regex = re.compile(" ")
    w = Workbook()                   # create a workbook
    ws = w.add_sheet("node_1_data")  # create a worksheet
    ws.write(0, 0, "pid")
    ws.write(0, 1, "docker_id")
    ws.write(0, 2, "docker_name")
    ws.write(0, 3, "progress_name")
    ws.write(0, 4, "mem(MB)")
    index = 1
    with open(file_name, "r") as fp:
        for info in fp.readlines():
            progress_info = info.strip()
            if progress_info:
                progress_id, docker_id, docker_name, progress_name, mem = regex.split(progress_info)
                ws.write(index, 0, progress_id)
                ws.write(index, 1, docker_id)
                ws.write(index, 2, docker_name)
                ws.write(index, 3, progress_name)
                ws.write(index, 4, float(mem) / 1024 / 1024)   # bytes -> MB
                index += 1
    w.save(result_name)


def delete_tmp_file():
    """Remove the intermediate files left over from previous runs."""
    data_name = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data.txt")
    node_test_name = os.path.join(os.path.dirname(os.path.abspath(__file__)), "node_test.log")
    if os.path.exists(data_name):
        os.remove(data_name)
    if os.path.exists(node_test_name):
        os.remove(node_test_name)


if __name__ == "__main__":
    delete_tmp_file()
    get_all_process_info()
    get_docker_name()
    ge_excel()
    delete_tmp_file()
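
The script above walks from each container to its PIDs via docker inspect and the runc state file. For a single process, the lookup can also go the other way; the following is a hedged sketch (not part of the original script) that assumes a Linux host running Docker on cgroup v1, where the paths listed in /proc/<pid>/cgroup normally embed the 64-character container ID:

# Sketch: resolve the container ID for a single PID by reading /proc/<pid>/cgroup.
# Assumes a Linux host with Docker on cgroup v1, where cgroup paths look like
# ".../docker/<64-hex-container-id>"; on cgroup v2 or other runtimes the path differs.
import re

def container_id_of(pid):
    pattern = re.compile(r"[0-9a-f]{64}")
    try:
        with open("/proc/%s/cgroup" % pid) as fp:
            for line in fp:
                match = pattern.search(line)
                if match:
                    return match.group(0)
    except IOError:        # process exited or /proc entry unreadable
        return None
    return None

print(container_id_of(1234))   # hypothetical PID used for illustration

The full ID returned this way can then be matched against the output of docker ps --no-trunc to recover the container name.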

 
