benchmarking prep

Askill 2020-12-20 19:46:11 +01:00
parent c828e2e0d2
commit 6ccf78b54a
8 changed files with 65 additions and 31 deletions

View File

@@ -14,14 +14,17 @@ class Config:
         "ttolerance": 20,
         "videoBufferLength": 500,
         "LayersPerContour": 220,
-        "avgNum":10
+        "avgNum":10,
+        "ce_average_threads": 16,
+        "ce_comp_threads":16,
+        "lf_threads":16,
     }
     def __init__(self):
         '''This is basically just a wrapper for a json / python dict'''
-        print("Current Config:")
-        for key, value in self.c.items():
-            print(f"{key}:\t\t{value}")
+        #print("Current Config:")
+        #for key, value in self.c.items():
+        #    print(f"{key}:\t\t{value}")
     def __getitem__(self, key):
         if key not in self.c:

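The Config wrapper these new thread-count keys live in is cut off above at __getitem__. Below is a minimal sketch of how such a dict-backed wrapper typically behaves; the class name, the KeyError message, and the __setitem__ body are assumptions for illustration, not the committed code:

    # Sketch of a dict-backed config wrapper (assumed behaviour, not the committed code).
    class ConfigSketch:
        c = {
            "videoBufferLength": 500,
            "ce_average_threads": 16,
            "ce_comp_threads": 16,
            "lf_threads": 16,
        }

        def __getitem__(self, key):
            if key not in self.c:
                # assumed error handling; the committed method is truncated at this point
                raise KeyError(f"unknown config key: {key}")
            return self.c[key]

        def __setitem__(self, key, value):
            # main.py overrides the thread counts per benchmark run through this path
            self.c[key] = value

Some such __setitem__ must exist, since main.py below assigns config["ce_average_threads"] and friends; the body shown here is only the obvious one-liner.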
View File

@@ -46,7 +46,7 @@ class ContourExtractor:
         self.lastFrames = None
         self.averages = dict()
-        print("ContourExtractor initiated")
+        #print("ContourExtractor initiated")
     def extractContours(self):
         videoReader = VideoReader(self.config)
@@ -55,10 +55,10 @@ class ContourExtractor:
         threads = self.config["videoBufferLength"]
         self.start = time.time()
         # start a bunch of frames and let them read from the video reader buffer until the video reader reaches EOF
-        with ThreadPool(16) as pool:
+        with ThreadPool(self.config["ce_comp_threads"]) as pool:
             while not videoReader.videoEnded():
                 if videoReader.buffer.qsize() == 0:
                     time.sleep(.1)
-                    time.sleep(.5)
                 tmpData = [videoReader.pop() for i in range(0, videoReader.buffer.qsize())]
                 self.computeMovingAverage(tmpData)
@@ -135,7 +135,7 @@ class ContourExtractor:
         frames = self.lastFrames + frames
         tmp = [[j, frames, averageFrames] for j in range(averageFrames, len(frames))]
-        with ThreadPool(16) as pool:
+        with ThreadPool(self.config["ce_average_threads"]) as pool:
             pool.map(self.averageDaFrames, tmp)
         self.lastFrames = frames[-averageFrames:]

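The hardcoded ThreadPool(16) sizes above are exactly what the new ce_comp_threads / ce_average_threads keys replace, so the pool size can be swept during benchmarking. A standalone sketch of timing one pool size against dummy work; dummy_work and time_pool are placeholders, not the real contour-extraction code:

    # Standalone sketch: time pool.map for a given thread count (placeholder workload).
    import time
    from multiprocessing.pool import ThreadPool

    def dummy_work(n):
        # stand-in for per-frame work; the real code averages frames and extracts contours
        return sum(i * i for i in range(n))

    def time_pool(threads, jobs=256, size=50_000):
        start = time.time()
        with ThreadPool(threads) as pool:
            pool.map(dummy_work, [size] * jobs)
        return time.time() - start

    if __name__ == "__main__":
        for threads in (1, 4, 8, 16):
            print(threads, round(time_pool(threads), 3))

Because this is a ThreadPool rather than a process pool, pure-Python CPU work is serialized by the GIL; extra threads mainly pay off for I/O and for calls that release the GIL, such as OpenCV routines, which is presumably what the sweep is meant to reveal.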
View File

@@ -16,7 +16,7 @@ class Exporter:
         self.outputPath = config["outputPath"]
         self.resizeWidth = config["resizeWidth"]
         self.config = config
-        print("Exporter initiated")
+        #print("Exporter initiated")
     def export(self, layers, contours, masks, raw = True, overlayed = True):
         if raw:

View File

@@ -5,7 +5,7 @@ class Importer:
         self.path = config["importPath"]
     def importRawData(self):
-        print("Loading previous results")
+        #print("Loading previous results")
         with open(self.path, "rb") as file:
             layers, contours, masks = pickle.load(file)
         return (layers, contours, masks)

View File

@@ -19,7 +19,7 @@ class LayerFactory:
         self.resizeWidth = config["resizeWidth"]
         self.footagePath = config["inputPath"]
         self.config = config
-        print("LayerFactory constructed")
+        #print("LayerFactory constructed")
         self.data = data
         if data is not None:
             self.extractLayers(data)
@@ -37,7 +37,7 @@ class LayerFactory:
         self.oldLayerIDs = []
-        with ThreadPool(16) as pool:
+        with ThreadPool(self.config["lf_threads"]) as pool:
             for frameNumber in sorted(data.keys()):
                 contours = data[frameNumber]
                 masks = maskArr[frameNumber]
@@ -49,11 +49,11 @@ class LayerFactory:
                 tmp = [[frameNumber, contour, mask]
                        for contour, mask in zip(contours, masks)]
-                #pool.map(self.getLayers, tmp)
-                for x in tmp:
-                    self.getLayers(x)
+                pool.map(self.getLayers, tmp)
+                #for x in tmp:
+                #    self.getLayers(x)
-        #self.joinLayers()
+        self.joinLayers()
         return self.layers
     def getLayers(self, data):

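Switching back from the serial loop to pool.map means getLayers now runs on several threads at once, so any shared state it updates has to hold up under concurrency. A hedged sketch of the usual pattern, a lock around the shared container; the lock and the placeholder layer tuple are assumptions for illustration, not part of the commit:

    # Sketch: guarding shared layer state when getLayers is mapped over a ThreadPool.
    import threading

    class LayerFactorySketch:
        def __init__(self):
            self.layers = []                  # shared across worker threads
            self.lock = threading.Lock()      # assumption: not shown in the committed code

        def getLayers(self, data):
            frameNumber, contour, mask = data
            layer = (frameNumber, contour, mask)   # placeholder for the real layer object
            with self.lock:                        # serialize writes to the shared list
                self.layers.append(layer)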
View File

@@ -21,18 +21,18 @@ class LayerManager:
         self.config = config
         #self.classifier = Classifier()
         self.tags = []
-        print("LayerManager constructed")
+        #print("LayerManager constructed")
     def transformLayers(self):
-        print("'Cleaning' Layers")
-        print("Before deleting short layers ", len(self.layers))
+        #print("'Cleaning' Layers")
+        #print("Before deleting short layers ", len(self.layers))
         self.freeMin()
-        print("Before deleting long layers ", len(self.layers))
+        #print("Before deleting long layers ", len(self.layers))
         self.freeMax()
         self.sortLayers()
         self.calcStats()
         self.deleteSparse()
-        print("after deleting sparse layers ", len(self.layers))
+        #print("after deleting sparse layers ", len(self.layers))
     def deleteSparse(self):
         toDelete = []

bm.csv Normal file
View File

@@ -0,0 +1,3 @@
+ce_average_threads,ce_comp_threads,lf_threads,videoBufferLength,ce,le,lm,ex
+16,16,16,500,0,3.691664457321167,3.737372398376465,0.008670568466186523,3.7460429668426514

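bm.csv gets one row appended per benchmark run by main.py below. Note that the committed row carries one more value than the header names; the final value is the last thing main.py appends, the total wall time. A short sketch for ranking runs once the sweep has filled the file, assuming the last column stays the total:

    # Sketch: load bm.csv and list the fastest runs by total wall time (last column).
    import csv

    def load_runs(path="bm.csv"):
        with open(path, newline="") as f:
            reader = csv.reader(f)
            header = next(reader)
            rows = [row for row in reader if row]
        return header, rows

    if __name__ == "__main__":
        header, rows = load_runs()
        rows.sort(key=lambda r: float(r[-1]))   # smallest total time first
        for row in rows[:5]:
            print(row)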
main.py
View File

@@ -9,13 +9,23 @@ from Application.Importer import Importer
 from Application.VideoReader import VideoReader
 from Application.LayerManager import LayerManager
 from Application.Classifiers import *
+from itertools import product
+import csv
-def main():
+def main(v1, v2, v3, v4):
     startTotal = time.time()
     start = startTotal
     config = Config()
-    fileName = "3.mp4"
+    config["ce_average_threads"] = v1
+    config["ce_comp_threads"] = v2
+    config["lf_threads"] = v3
+    config["videoBufferLength"] = v4
+    fileName = "X23-1.mp4"
     outputPath = os.path.join(os.path.dirname(__file__), "output")
     dirName = os.path.join(os.path.dirname(__file__), "generate test footage")
@@ -25,21 +35,25 @@ def main():
     config["importPath"] = os.path.join(outputPath, fileName.split(".")[0] + ".txt")
     config["w"], config["h"] = VideoReader(config).getWH()
-    stats = []
+    stats = [config["ce_average_threads"], config["ce_comp_threads"], config["lf_threads"], config["videoBufferLength"]]
     if not os.path.exists(config["importPath"]):
         contours, masks = ContourExtractor(config).extractContours()
+        stats.append(time.time() - start)
         start = time.time()
-        print("Time consumed extracting contours: ", stats["Contour Extractor"])
+        #print("Time consumed extracting contours: ", stats["Contour Extractor"])
         layerFactory = LayerFactory(config)
         layers = layerFactory.extractLayers(contours, masks)
+        stats.append(time.time() - start)
         start = time.time()
     else:
+        stats.append(0)
         layers, contours, masks = Importer(config).importRawData()
-        #layerFactory = LayerFactory(config)
-        #layers = layerFactory.extractLayers(contours, masks)
+        layerFactory = LayerFactory(config)
+        layers = layerFactory.extractLayers(contours, masks)
+        stats.append(time.time() - start)
     layerManager = LayerManager(config, layers)
     layerManager.transformLayers()
@@ -52,16 +66,30 @@ def main():
     if len(layers) == 0:
         exit(1)
     exporter = Exporter(config)
-    print(f"Exporting {len(contours)} Contours and {len(layers)} Layers")
-    exporter.export(layers, contours, masks, raw=True, overlayed=True)
+    #print(f"Exporting {len(contours)} Contours and {len(layers)} Layers")
+    #exporter.export(layers, contours, masks, raw=False, overlayed=True)
+    stats.append(time.time() - start)
-    print("Total time: ", time.time() - startTotal)
+    stats.append(time.time() - startTotal)
-    print(stats)
+    with open("bm.csv", "a") as myfile:
+        writer = csv.writer(myfile)
+        writer.writerow(stats)
+    #print(stats)
+    exit(0)
 if __name__ == "__main__":
-    main()
+    ass = list(range(1, 18, 4))
+    bss = list(range(1, 18, 4))
+    css = list(range(1, 16, 8))
+    dss = list(range(50, 500, 200))
+    params = [ass, bss, css, dss]
+    params = list(product(*params))
+    counter = 0
+    for a,b,c,d in params:
+        print(f"{counter}/{len(params)} - {counter/len(params)} {a, b, c, d}")
+        counter += 1
+        main(a, b, c, d)
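For scale: range(1, 18, 4) yields five values, range(1, 16, 8) two, and range(50, 500, 200) three, so the product above runs main() 5 * 5 * 2 * 3 = 150 times and appends 150 rows to bm.csv. A quick check using the same names as the loop above:

    # Quick check of the sweep size defined above.
    from itertools import product
    ass = list(range(1, 18, 4))       # [1, 5, 9, 13, 17]
    bss = list(range(1, 18, 4))       # [1, 5, 9, 13, 17]
    css = list(range(1, 16, 8))       # [1, 9]
    dss = list(range(50, 500, 200))   # [50, 250, 450]
    print(len(list(product(ass, bss, css, dss))))   # 150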