Skip to content

Commit

Permalink
Merge pull request #136 from makehumancommunity/_feature_fix_io_encoding
Browse files Browse the repository at this point in the history
Feature fix io encoding
  • Loading branch information
joepal1976 authored Nov 1, 2020
2 parents 00a955e + 050afd0 commit 0d2ac67
Show file tree
Hide file tree
Showing 37 changed files with 538 additions and 610 deletions.
18 changes: 8 additions & 10 deletions makehuman/apps/compat.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,6 @@
import progress
import csv
import getpath
import io

def _parse_version(version_str):
version_str = version_str.lower().strip()
Expand Down Expand Up @@ -211,15 +210,14 @@ class MHM10Loader(object):
def getModifierMapping(self):
    """Return the MH 1.0 -> current modifier name mapping, loading it lazily.

    On first call, parses ``modifiers/mh_1-0_modifier_mapping.csv`` from the
    system data path and caches the result on the instance; later calls
    return the cached dict.

    Returns:
        dict: maps old modifier name (column 0) to a tuple
        ``(new_name, flag)`` built from columns 1 and 2.
    """
    if self.modifier_mapping is None:
        self.modifier_mapping = dict()
        # 'with' guarantees the file is closed even if parsing raises.
        with open(getpath.getSysDataPath('modifiers/mh_1-0_modifier_mapping.csv'), 'r', encoding='utf-8') as f:
            csvreader = csv.reader(f, delimiter=',', quotechar='"')
            for r_idx, row in enumerate(csvreader):
                if r_idx == 0:
                    # First line is header, drop it
                    continue
                if row[0]:
                    # NOTE(review): bool() of a non-empty string is always True,
                    # so a CSV cell containing "False" or "0" still maps to True
                    # — confirm this is the intended semantics of column 2.
                    self.modifier_mapping[row[0]] = (row[1], bool(row[2]))
    return self.modifier_mapping

def loadProperty(self, line_data, default_load_callback, strict):
Expand Down
24 changes: 10 additions & 14 deletions makehuman/apps/gui/guiload.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,6 @@
from getpath import formatPath
import filecache
import os
import io


class HumanFileSort(fc.FileSort):
Expand All @@ -59,18 +58,15 @@ def __init__(self):
def getMeta(self, filename):
    """Read sort-relevant metadata fields from a MakeHuman .mhm file.

    Scans *filename* line by line; for every line whose first
    whitespace-separated token appears in ``self.metaFields``
    (presumably populated in ``__init__`` — not visible here, TODO confirm),
    records the second token as a float.

    Args:
        filename: path to the .mhm file to scan (read as UTF-8 text).

    Returns:
        dict: field name -> float value for each recognized field.
    """
    meta = {}

    # 'with' guarantees the handle is closed even if parsing raises.
    with open(filename, 'r', encoding="utf-8") as f:
        for line in f:
            line = line.strip()
            lineData = line.split()
            if not lineData:
                continue
            field = lineData[0]
            if field in self.metaFields:
                # NOTE(review): raises IndexError/ValueError if a recognized
                # field has no numeric value token — assumed well-formed input.
                meta[field] = float(lineData[1])
    return meta


Expand Down Expand Up @@ -136,7 +132,7 @@ def getMetadataImpl(self, filename):
uuid = ''
tags = set()
if os.path.isfile(filename) and os.path.splitext(filename)[1] == '.mhm':
with io.open(filename, 'r', encoding='utf-8') as f:
with open(filename, 'r', encoding='utf-8') as f:
for line in f:
if line and not line.startswith('#'):
data = line.strip().split()
Expand Down
3 changes: 1 addition & 2 deletions makehuman/apps/gui/guimodifier.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,6 @@
from collections import OrderedDict
import language
import collections
import io
from mesh_operations import calculateSurface

class ModifierTaskView(gui3d.TaskView):
Expand Down Expand Up @@ -219,7 +218,7 @@ def loadModifierTaskViews(filename, human, category, taskviewClass=None):
if not taskviewClass:
taskviewClass = ModifierTaskView

data = json.load(io.open(filename, 'r', encoding='utf-8'), object_pairs_hook=OrderedDict)
data = json.load(open(filename, 'r', encoding='utf-8'), object_pairs_hook=OrderedDict)
taskViews = []
# Create task views
for taskName, taskViewProps in data.items():
Expand Down
6 changes: 2 additions & 4 deletions makehuman/apps/human.py
Original file line number Diff line number Diff line change
Expand Up @@ -1456,7 +1456,6 @@ def refreshPose(self, updateIfInRest=False):
self.callEvent('onChanged', event)

def load(self, filename, update=True, strict=False):
import io

def _compare_versions(mhmVersion,pgmVersion):
""" Return true if major+minor matches, false if they do not. Ignore patch number. """
Expand Down Expand Up @@ -1488,7 +1487,7 @@ def _get_version(lineData):

subdivide = False

with io.open(filename, 'r', encoding="utf-8") as f:
with open(filename, 'r', encoding="utf-8") as f:

for lh in list(G.app.loadHandlers.values()):
try:
Expand Down Expand Up @@ -1588,14 +1587,13 @@ def _do_load_property(lineData):
log.message("Done loading MHM file.")

def save(self, filename):
import io
from progress import Progress
progress = Progress(len(G.app.saveHandlers))
event = events3d.HumanEvent(self, 'save')
event.path = filename
self.callEvent('onChanging', event)

with io.open(filename, "w", encoding="utf-8") as f:
with open(filename, "w", encoding="utf-8") as f:
f.write('# Written by MakeHuman %s\n' % getVersionStr())
f.write('version %s\n' % getShortVersion(noSub=True))
if self.getUuid():
Expand Down
5 changes: 2 additions & 3 deletions makehuman/apps/humanmodifier.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,6 @@
import operator
import numpy as np
import log
import io
import targets
from functools import reduce

Expand Down Expand Up @@ -674,7 +673,7 @@ def loadModifiers(filename, human):
from collections import OrderedDict
modifiers = []
lookup = OrderedDict()
data = json.load(io.open(filename, 'r', encoding='utf-8'), object_pairs_hook=OrderedDict)
data = json.load(open(filename, 'r', encoding='utf-8'), object_pairs_hook=OrderedDict)
for modifierGroup in data:
groupName = modifierGroup['group']
for mDef in modifierGroup['modifiers']:
Expand Down Expand Up @@ -708,7 +707,7 @@ def loadModifiers(filename, human):
descFile = _tmp[0]+'_desc'+_tmp[1]
hasDesc = OrderedDict([(key,False) for key in lookup.keys()])
if os.path.isfile(descFile):
data = json.load(io.open(descFile, 'r', encoding='utf-8'), object_pairs_hook=OrderedDict)
data = json.load(open(descFile, 'r', encoding='utf-8'), object_pairs_hook=OrderedDict)
dCount = 0
for mName, mDesc in data.items():
try:
Expand Down
56 changes: 26 additions & 30 deletions makehuman/apps/metadataengine.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,14 +132,13 @@ def loadRecord(archivePath, recordID):
"""

time1 = time.perf_counter()
f = open(archivePath, 'r', encoding="utf-8")
record = None
for line in f:
if line.find(recordID) != -1:
record = line.split()
log.message('Found %s fields in %s sec', len(record), time.perf_counter() - time1)
break
f.close()
with open(archivePath, 'r', encoding="utf-8") as f:
record = None
for line in f:
if line.find(recordID) != -1:
record = line.split()
log.message('Found %s fields in %s sec', len(record), time.perf_counter() - time1)
break
return record


Expand All @@ -160,12 +159,11 @@ def searchRecord(archivePath, field):
"""

time1 = time.perf_counter()
f = open(archivePath, 'r', encoding="utf-8")
recordIDs = []
for line in f:
if line.find(field) != -1:
recordIDs.append(line.split()[0])
f.close()
with open(archivePath, 'r', encoding="utf-8") as f:
recordIDs = []
for line in f:
if line.find(field) != -1:
recordIDs.append(line.split()[0])
log.message('Found %s records in %s sec', len(recordIDs), time.perf_counter() - time1)
return recordIDs

Expand All @@ -192,28 +190,26 @@ def saveRecord(archivePath, recordToSave):
records = []
isExistent = None
try:
f = open(archivePath, 'w', encoding="utf-8")
i = 0
for line in f:
if line.find(recordID) != -1:
i += 1
isExistent = 1
oldRecord = line.split()
newRecord = recordToSave.split()
if oldRecord[0] == recordID:
line = joinRecords(newRecord, oldRecord)
records.append(line.strip())
f.close()
with open(archivePath, 'w', encoding="utf-8") as f:
i = 0
for line in f:
if line.find(recordID) != -1:
i += 1
isExistent = 1
oldRecord = line.split()
newRecord = recordToSave.split()
if oldRecord[0] == recordID:
line = joinRecords(newRecord, oldRecord)
records.append(line.strip())
except:
log.message('A new %s archive will be created', archivePath)

if not isExistent:
records.append(recordToSave)

f = open(archivePath, 'w', encoding="utf-8")
for record in records:
f.write('%s\n' % record)
f.close()
with open(archivePath, 'w', encoding="utf-8") as f:
for record in records:
f.write('%s\n' % record)
log.message('Record %s saved in %s sec', recordID, time.perf_counter() - time1)


3 changes: 1 addition & 2 deletions makehuman/compile_targets.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,6 @@
import os
import zipfile
import fnmatch
import io

def getAllFiles(rootPath, filterStrArr):
result = [ None ]*len(filterStrArr)
Expand Down Expand Up @@ -93,7 +92,7 @@ def getFiles(root, filenames, filterStr):
print('error converting target %s' % path)

print("Writing images list")
with io.open('data/images.list', 'w', encoding="utf-8") as f:
with open('data/images.list', 'w', encoding="utf-8") as f:
allImages = allFiles[1]
for path in allImages:
path = path.replace('\\','/')
Expand Down
5 changes: 2 additions & 3 deletions makehuman/core/algos3d.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,6 @@
import numpy as np
import log
from getpath import getSysDataPath, canonicalPath
import io

_targetBuffer = {}

Expand Down Expand Up @@ -128,7 +127,7 @@ def _load_text(self, name):
import makehuman
data = []
license = defaultTargetLicense()
with io.open(name, 'r', encoding='utf-8') as fd:
with open(name, 'r', encoding='utf-8') as fd:
for line in fd:
line = line.strip()
if line.startswith('#'):
Expand Down Expand Up @@ -461,7 +460,7 @@ def saveTranslationTarget(obj, targetPath, groupToSave=None, epsilon=0.001):
license_str = '\n'.join(['# ' + s for s in license_str])

try:
with io.open(targetPath, 'w', encoding='utf-8') as fileDescriptor:
with open(targetPath, 'w', encoding='utf-8') as fileDescriptor:
fileDescriptor.write('%s\n\n\n' % license_str)
for i in range(nVertsExported):
fileDescriptor.write('%d %f %f %f\n' % (vertsToSave[i], delta[i,0], delta[i,1], delta[i,2]))
Expand Down
Loading

0 comments on commit 0d2ac67

Please sign in to comment.