本文整理汇总了Python中wwjufsdatabase.libs.utils.transform.transformDirToInternal函数的典型用法代码示例。如果您正苦于以下问题:Python transformDirToInternal函数的具体用法?Python transformDirToInternal怎么用?Python transformDirToInternal使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了transformDirToInternal函数的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: processItem
def processItem(self, job, item):
    """Delay change notifications until a path has been quiet long enough.

    job:  the beanstalk job wrapping this notification; it is either
          deleted (done/stale) or re-released with a delay (still settling).
    item: decoded notification dict with at least "monitoringPath",
          "fullPath" and "timestamp" keys.

    A notification is held (released back into the tube with
    gItemDelayTime delay) as long as newer notifications for the same path
    keep arriving; when a job comes back with an unchanged timestamp the
    path is considered settled and the job body is forwarded to the output
    tube.
    """
    monitoringFullPath = transform.transformDirToInternal(item["monitoringPath"])
    fullPath = transform.transformDirToInternal(item["fullPath"])
    # Check if item exists in local file system; drop jobs for vanished paths
    if not os.path.exists(fullPath):
        job.delete()
        return
    # Lazily create the per-monitoring-path bookkeeping dict
    if not self.itemToProcess.has_key(monitoringFullPath):
        self.itemToProcess[monitoringFullPath] = {}
    #############################################
    # Start processing
    #############################################
    # If the full path already in tube, check if the timestamp is updated
    if self.itemToProcess[monitoringFullPath].has_key(fullPath):
        savedItem = self.itemToProcess[monitoringFullPath][fullPath]
        if savedItem["timestamp"] == item["timestamp"]:
            # Item not updated for the whole delay period: it has settled,
            # forward it to the output queue and finish the job
            self.outputBeanstalk.put(job.body)
            print "output item:", item
            job.delete()
        elif savedItem["timestamp"] < item["timestamp"]:
            # Received a newer notification for the path, update saved info
            # and keep delaying
            self.itemToProcess[monitoringFullPath][fullPath] = item
            job.release(priority=beanstalkc.DEFAULT_PRIORITY, delay=gItemDelayTime)
            print "item updated"
        else:
            # Saved timestamp is newer than this job's: stale duplicate
            job.delete()
    else:
        # New notification, add it and start the delay cycle
        self.itemToProcess[monitoringFullPath][fullPath] = item
        # print item, job, gItemDelayTime
        # priority is necessary to avoid error for requesting priority to be an int in beanstalkc
        job.release(priority=beanstalkc.DEFAULT_PRIORITY, delay=gItemDelayTime)
        print "new item added"
开发者ID:weijia,项目名称:ufs,代码行数:34,代码来源:tubeDelayServiceV3.py
示例2: processItem
def processItem(self, job, item):
#fullPath = transform.transformDirToInternal(item["fullPath"])
#monitoringFullPath = transform.transformDirToInternal(item["monitoringPath"])
#source_dir = item["SourceDir"]
#misc.ensureDir(source_dir)
tag = item["tag"]
working_dir = item["WorkingDir"]
misc.ensureDir(transform.transformDirToInternal(working_dir))
target_dir = item["TargetDir"]
misc.ensureDir(transform.transformDirToInternal(target_dir))
import wwjufsdatabase.libs.services.servicesV2 as service
req = service.req()
t = tagSystem.getTagSysObj(req.getDbSys())
e = t.getObjs(unicode(tag))
for i in e:
print i
source_dir = transform.transformDirToInternal(i)
AutoArchiveThumb(source_dir, target_dir, working_dir)
job.delete()
return False
#Return true only when the item should be kept in the tube
return True
开发者ID:weijia,项目名称:ufs,代码行数:28,代码来源:TagProcessServiceBase.py
示例3: encInfoZip
def encInfoZip(self, pendingCollection):
    """Flush the accumulated zipped-file info to encrypted storage.

    Writes self.zippedFileInfo as pretty-printed JSON to a timestamped .log
    file in the working dir, zips that log, encrypt-copies the zip into a
    UTC date-based (YYYY/MM/DD) subdirectory of the zip storage dir,
    records the batch via updateZipLog, and finally resets
    self.zippedFileInfo for the next batch.
    """
    ############################
    # Save info for zipped files
    ############################
    logFilePath = transform.transformDirToInternal(
        fileTools.getTimestampWithFreeName(self.workingDir, '.log'))
    s = json.dumps(self.zippedFileInfo, sort_keys=True, indent=4)
    f = open(logFilePath,'w')
    f.write(s)
    f.close()
    # Zip the log next to it: same name with .zip appended after .log
    logZipPath = logFilePath.replace(u'.log',u'.log.zip')
    logZip = zipClass.ZFile(logZipPath, 'w')
    logZip.addfile(unicode(logFilePath), os.path.basename(logFilePath))
    logZip.close()
    # Build the UTC date directory <zipStorageDir>/YYYY/MM/DD
    gTimeV = time.gmtime()
    yearStr = time.strftime("%Y", gTimeV)
    monthStr = time.strftime("%m", gTimeV)
    dayStr = time.strftime("%d", gTimeV)
    dateTimeDir = yearStr+"/"+monthStr+"/"+dayStr
    newEncDir = unicode(os.path.join(self.zipStorageDir, dateTimeDir))
    misc.ensureDir(newEncDir)
    targetPath = transform.transformDirToInternal(
        fileTools.getTimestampWithFreeName(newEncDir, '.enc'))
    # NOTE(review): the encrypted log is stored with an .encziplog extension,
    # presumably to distinguish info logs from data archives -- confirm.
    self.encCopier.copy(logZipPath, targetPath.replace('.enc', '.encziplog'))
    ############################
    # Update state in storage state
    ############################
    self.updateZipLog(self.zippedFileInfo, pendingCollection)
    #Clean the current zipped file info
    self.zippedFileInfo = {}
开发者ID:weijia,项目名称:ufs,代码行数:33,代码来源:encZipInfoCollection.py
示例4: AutoArchiveThumb
def AutoArchiveThumb(source_folder = gAutoArchiveFullPath, target_dir = g_default_target_dir,
                     workingDir = gWorkingDir, taskUuid = None):
    """Wire up the thumbnail-archiving service pipeline for one folder.

    source_folder: folder to enumerate/monitor for files to thumbnail
    target_dir:    where archived thumbnails end up
    workingDir:    scratch directory for the archive service
    taskUuid:      unique id used to name this task's beanstalk tubes;
                   a fresh uuid is generated when omitted

    Bug fix: the original default was str(uuid.uuid4()) evaluated once at
    function-definition time, so every call relying on the default shared
    the same uuid and therefore the same tube names.  A fresh uuid is now
    generated per call; passing taskUuid explicitly behaves as before.
    """
    if taskUuid is None:
        taskUuid = str(uuid.uuid4())
    inputTubeName = "collectionListTube"+taskUuid
    delayedCollectionListTubeName = "delayedCollectionListTubeName"+taskUuid
    target_dir = transform.transformDirToInternal(target_dir)
    source_folder = transform.transformDirToInternal(source_folder)
    # Stage 1: enumerate existing files into the input tube
    s1 = FolderEnumeratingService.FolderEnumeratingService()
    s1.addItem({"full_path": source_folder, "black_list":[],
                "target_tube_name": inputTubeName})
    # Stage 2: monitor the folder for new changes, feeding the same tube
    s2 = monitorService.monitorService()
    s2.addItem({"command": "monitorService", "fullPath":source_folder,
                "targetTubeName": inputTubeName,"blackList":g_ignore_file_type_list})
    # Stage 3: debounce notifications until files settle
    s3 = tubeDelayService()
    s3.addItem({"inputTubeName":inputTubeName,
                "outputTubeName": delayedCollectionListTubeName,"blackList":g_ignore_file_type_list})
    # Stage 4: archive the settled items' info
    s4 = FolderInfoArchiveService()
    s4.addItem({"InputTubeName":delayedCollectionListTubeName, "WorkingDir":workingDir, "TargetDir": target_dir})
示例5: __init__
def __init__(self, rootPath, fullPath, itemInfo, zipFileObj, pathInZipFile):
    """Represent one stored item that lives inside an (encrypted) zip file.

    rootPath/fullPath are normalized to the project's internal path form;
    zipFileObj and pathInZipFile locate the item's bytes inside the archive.
    """
    # Initialize the plain folder-storage item first ...
    folderStorage.folderStorageItem.__init__(self, rootPath, fullPath)
    # ... then override both paths with their normalized internal form.
    self.rootPath = transform.transformDirToInternal(rootPath)
    self.fullPath = transform.transformDirToInternal(fullPath)
    self.itemInfo = itemInfo
    self.zipFileObj = zipFileObj
    self.pathInZipFile = pathInZipFile
示例6: __init__
def __init__(self, rootDir, backupDir, syncFolderCollectionId, dbInst):
    '''
    syncFolderCollectionId is a virtual collection which contains all items
    with synced info; this object is that collection backed by MongoDB,
    plus a recursive enumeration of rootDir.
    '''
    collectionDatabase.collectionOnMongoDbBase.__init__(
        self, syncFolderCollectionId, dbInst.getCollectionDb())
    self.objDb = dbInst
    # Keep both directories in the canonical internal path form.
    self.rootDir = transform.transformDirToInternal(rootDir)
    self.backupDir = transform.transformDirToInternal(backupDir)
    # Recursive view of everything currently under rootDir.
    self.folderCollection = folderRecursiveEnumCollection.folderRecursiveEnumCollection(
        self.rootDir, dbInst)
示例7: processItem
def processItem(self, job, item):
    """Dispatch tag-processing work to a per-task output tube.

    Two kinds of items arrive here:
      * a task registration (has "output_tube_name"): remember it in
        self.processing_tag_dict and enqueue every object currently
        carrying the tag;
      * a late-tagged element (no "output_tube_name"): look up the
        registered task for its tag and enqueue just that element.

    Always deletes the job and returns False (never keep the job in the
    tube).
    """
    tag = item["tag"]
    task_item = item
    if item.has_key("output_tube_name"):
        #################################
        # Adding tag processing task
        #################################
        task_item = item
        self.processing_tag_dict[tag] = item
        # Local import kept as in the original (avoids load-time cycles).
        import wwjufsdatabase.libs.services.servicesV2 as service
        req = service.req()
        t = tagSystem.getTagSysObj(req.getDbSys())
        tagged_item_list = t.getObjs(unicode(tag))
    else:
        #################################
        # A new tag added for existing tag processing task
        #################################
        if self.processing_tag_dict.has_key(tag):
            #Task exists, add the new tagged element for processing
            task_item = self.processing_tag_dict[tag]
            tagged_item_list = [transform.transformDirToInternal(item["url"])]
        else:
            #Not a valid item (no task registered for this tag yet), return
            print "not a valid item or tag not have processor yet"
            job.delete()
            return False
    # Task parameters come from the registered task, not this item.
    output_tube_name = task_item["output_tube_name"]
    working_dir = task_item["working_dir"]
    misc.ensureDir(transform.transformDirToInternal(working_dir))
    target_dir = task_item["target_dir"]
    misc.ensureDir(transform.transformDirToInternal(target_dir))
    b = beanstalkServiceBase(output_tube_name)
    for i in tagged_item_list:
        info(i)
        source_dir = transform.transformDirToInternal(i)
        b.addItem({"source_dir":source_dir, "working_dir": working_dir, "target_dir":target_dir})
    job.delete()
    return False
示例8: callback
def callback(self, pathToWatch, relativePath, changeType):
    """Publish one file-change notification to the target beanstalk tube.

    pathToWatch:  the monitored root directory
    relativePath: path of the changed entry, relative to pathToWatch
    changeType:   change description string (e.g. from the dir monitor)

    Serializes a notification dict (with a wall-clock timestamp) as JSON
    and puts it on self.targetTube.
    """
    fullPath = transform.transformDirToInternal(os.path.join(pathToWatch, relativePath))
    itemDict = {"monitoringPath": transform.transformDirToInternal(pathToWatch),
                "fullPath": fullPath, "changeType": changeType,
                "timestamp": time.time()}
    # Serialize once (the original serialized the identical dict twice and
    # bound the put() result to an unused local).
    s = json.dumps(itemDict, sort_keys=True, indent=4)
    beanstalk = beanstalkc.Connection(host=gBeanstalkdServerHost, port=gBeanstalkdServerPort)
    beanstalk.use(self.targetTube)
    beanstalk.put(s)
示例9: __init__
def __init__(self, srcRoot, storageRoot, stateStoragePath = 'd:/state.txt'):
print "src root is:", srcRoot
self.srcRoot = transform.transformDirToInternal(srcRoot)
self.storageRoot = transform.transformDirToInternal(storageRoot)
self.stateStoragePath = stateStoragePath
try:
f = open(self.stateStoragePath,'r')
self.config = json.load(f)
f.close()
except IOError:
self.config = {}
开发者ID:weijia,项目名称:ufs,代码行数:11,代码来源:archiveStorageBase.py
示例10: store
def store(self, element):
    """Add one file element to the current zip archive.

    element: object exposing getAbsPath(); the path is expected to live
    under self.srcRoot so the archive-internal path can be derived by
    stripping the root prefix.

    Rolls over to a fresh timestamped .zip archive whenever none is open
    yet or the running byte count exceeds MAX_SINGLE_ARCHIVE_SIZE.
    """
    #print 'storing....'
    fullPath = transform.transformDirToInternal(element.getAbsPath())
    # Archive-internal path: the path relative to the source root.
    relPath = fullPath.replace(self.srcRoot, '')
    if (self.curArchive is None) or (self.curArchivedSize > MAX_SINGLE_ARCHIVE_SIZE):
        self.curArchiveName = transform.transformDirToInternal(
            fileTools.getTimestampWithFreeName(self.storageRoot, '.zip'))
        self.curArchive = zipClass.ZFile(self.curArchiveName, 'w')
        self.curArchivedSize = 0
    #print 'copying "%s" to "%s"'%(fullPath, relPath)
    # NOTE(review): paths are encoded as gbk before being handed to the zip
    # library -- presumably for Windows/Chinese-locale tooling; confirm
    # before changing.
    self.curArchive.addfile(unicode(fullPath).encode('gbk'), unicode(relPath).encode('gbk'))
    self.curArchivedSize += os.stat(fullPath).st_size
示例11: processItem
def processItem(self, job, item):
    """Register an encrypted info archive into the per-monitoring-path
    collection database and dump its contents for inspection.

    item carries "monitoringPath" and "fullPath" of an encrypted archive
    file.  Returns True to release the job back into the tube, or False
    when the job was deleted because the file no longer exists.
    """
    monitoringFullPath = transform.transformDirToInternal(item['monitoringPath'])
    archiveId = gZipFolderCollectionPrefix + monitoringFullPath
    # Lazily create the collection for this monitoring path and register it
    # in the global zipped-info collection list.
    if not self.collectionInDbForMonitoringPath.has_key(monitoringFullPath):
        self.collectionInDbForMonitoringPath[monitoringFullPath] = collectionDatabase.collectionOnMongoDbBase(archiveId, self.dbInst.getCollectionDb())
        objUuid = self.dbInst.addVirtualObj({"monitoringPath": monitoringFullPath, "zippedInfoCollectionId": archiveId});
        idInCol = objUuid
        self.zippedInfoCollectionList.addObj(idInCol, objUuid)
    #Save the item in the archive collection: zippedInfoColllection://D:/tmp/
    fullPath = transform.transformDirToInternal(item["fullPath"])
    relativePath = transform.getRelativePathFromFull(fullPath, monitoringFullPath)
    if not os.path.exists(fullPath):
        job.delete()
        return False#No job release, job was deleted.
    #################################################################
    # Start process the
    #################################################################
    if not self.collectionInDbForMonitoringPath[monitoringFullPath].exists(relativePath):
        #This item is not in the collection, so we need to extract info from this item
        newObj = self.dbInst.getFsObjFromFullPath(fullPath)
        self.collectionInDbForMonitoringPath[monitoringFullPath].addObj(relativePath, newObj["uuid"])
        # Decrypt the archive into a temp file in the working dir, then dump
        # each info entry (JSON) and each extracted data file it contains.
        zipFilePath = transform.transformDirToInternal(
            fileTools.getTimestampWithFreeName(self.workingDir, gInfoFileDecryptedExt, gInfoFilePrefix))
        self.decCopier.copy(fullPath, zipFilePath)
        for i in zippedInfo(self.workingDir).enumItems(zipFilePath):
            print '--------------------------------------------------'
            print i
            fp = open(i, 'r')
            loadedFileInfo = json.load(fp)
            print loadedFileInfo
        for i in zippedInfo(self.workingDir).enumZippedFiles(zipFilePath):
            fp = open(i, 'r')
            print 'data file extracted:', i
    # The string below is commented-out (dead) code kept from the original.
    '''
    else:
        #This item is not in the collection, so we need to extract info from this item
        newObj = self.dbInst.getFsObjFromFullPath(fullPath)
        self.collectionInDbForMonitoringPath[monitoringFullPath].addObj(relativePath, newObj["uuid"])
        zipFilePath = transform.transformDirToInternal(
            fileTools.getTimestampWithFreeName(self.workingDir, gInfoFileDecryptedExt, gInfoFilePrefix))
        self.decCopier.copy(fullPath, zipFilePath)
        for i in zippedInfo(self.workingDir).enumItems(zipFilePath):
            print '--------------------------------------------------'
            print i
            fp = open(i, 'r')
            loadedFileInfo = json.load(fp)
            print loadedFileInfo
        for i in zippedInfo(self.workingDir).enumZippedFiles(zipFilePath):
            fp = open(i, 'r')
            print 'data file extracted:', i
    '''
    return True#Release job
示例12: checkDirChanges
def checkDirChanges(path_to_watch, busname = BUS_NAME_NAME, interfacename = INTERFACE_NAME, objname = OBJ_NAME):
    """Watch a directory tree on Windows and forward events over D-Bus.

    First announces every pre-existing file (change type "Existing"), then
    blocks in ReadDirectoryChangesW and relays each change to the D-Bus
    object at (busname, objname) via its notify() method.  Loops until
    need_to_quit becomes True -- which nothing in this function sets, so it
    effectively runs forever.
    """
    path_to_watch = transform.transformDirToInternal(os.path.abspath (path_to_watch))
    need_to_quit = False
    print "Watching %s at %s" % (path_to_watch, time.asctime ())
    # FILE_FLAG_BACKUP_SEMANTICS is required to open a directory handle.
    hDir = win32file.CreateFile(
        path_to_watch,
        win32con.GENERIC_READ,
        win32con.FILE_SHARE_READ|win32con.FILE_SHARE_WRITE,
        None,
        win32con.OPEN_EXISTING,
        win32con.FILE_FLAG_BACKUP_SEMANTICS,
        None
    )
    cnt = 0
    bus = dbus.SessionBus()
    proxy = bus.get_object(busname, objname)
    ###############################################
    #Scan for existing files
    ###############################################
    for i in os.walk(path_to_watch):
        print i
        for j in i[2]:
            fullPath = transform.transformDirToInternal(os.path.join(i[0], j))
            #print fullPath
            # The trailing False marks this as a pre-existing file, not a change.
            proxy.notify(path_to_watch, fullPath, "Existing", False, dbus_interface = interfacename)
    while not need_to_quit:
        # print "new watch\n"
        # Blocks until at least one change happens under the tree.
        results = win32file.ReadDirectoryChangesW(
            hDir,
            1024*256,
            True,
            win32con.FILE_NOTIFY_CHANGE_FILE_NAME
            | win32con.FILE_NOTIFY_CHANGE_DIR_NAME
            | win32con.FILE_NOTIFY_CHANGE_ATTRIBUTES
            | win32con.FILE_NOTIFY_CHANGE_SIZE
            | win32con.FILE_NOTIFY_CHANGE_LAST_WRITE
            | win32con.FILE_NOTIFY_CHANGE_SECURITY,
            None,
            None
        )
        if not need_to_quit:
            for action, file in results:
                #full_filename = os.path.join (self.path_to_watch, file)
                #print full_filename, ACTIONS.get (action, "Unknown")
                #callback(self.path_to_watch, file, ACTIONS.get (action, "Unknown"))
                # NOTE(review): "file" here is relative to path_to_watch,
                # unlike the full paths sent during the initial scan.
                print 'filechanged called:', path_to_watch, file, ACTIONS.get (action, "Unknown")
                proxy.notify(path_to_watch, file, ACTIONS.get (action, "Unknown"), True, dbus_interface = interfacename)
示例13: genPicThumb
def genPicThumb(local_path, dest_dir, mime_type = None):
#If no thumbnail exist, create one
#print '-----------------------localpath:',local_path
basename = os.path.basename(local_path)
#print "basename:" + basename
ext = basename.split(".")[-1]
#print ext
#if picFormatSupported(ext):
if picFormatSupportedV2(local_path, mime_type = None):
#It is a jpeg file, currently no other type supported
import Image #Using PIL lib
im = Image.open(local_path)
# convert to thumbnail image
im.thumbnail((g_default_thumb_size, g_default_thumb_size), Image.ANTIALIAS)
# don't save if thumbnail already exists
#Use _T as the thumb file end to indicate the end of the original firl
thumb_path_without_ext = os.path.join(dest_dir, basename.split(".")[0]+"_T")
import random
while os.path.exists(thumb_path_without_ext+".jpg"):
thumb_path_without_ext += str(random.randint(0,10))
thumb_path = thumb_path_without_ext+'.jpg'
#print thumb_path.encode("utf8","replace")
if im.mode != "RGB":
im = im.convert("RGB")
im.save(thumb_path, "JPEG")
return transform.transformDirToInternal(thumb_path)
else:
print 'non jpeg file not supported'
raise pictureFormatNotSupported
开发者ID:weijia,项目名称:ufs,代码行数:30,代码来源:picThumbGenerator.py
示例14: store
def store(self, processingObj, pendingCollection):
    '''
    Record one processed item's info for the next encrypted info-zip batch.

    processingObj = {"fullPath": "D:/tmp/good.txt", "size":100}
    (an object exposing getIdInCol(), getItemInfo() and dict-style access)
    pendingCollection: mapping of relative path -> uuid for items awaiting
    archival; kept in sync with the item's current uuid.
    '''
    ncl(processingObj)
    #relaPath = transform.formatRelativePath(item.getRelaPath())
    relaPath = processingObj.getIdInCol()
    ncl('Got relaPath')
    if pendingCollection.has_key(relaPath) and (pendingCollection[relaPath] != processingObj["uuid"]):
        #Item exists in pending but uuid is not the same, update the uuid for the pending item
        pendingCollection[relaPath] = processingObj["uuid"]
        cl('Added to pending')
    # If an item with the same relative path was already recorded in this
    # batch, keep the existing entry.  (The original used a bare
    # try/except around a lookup, which also swallowed unrelated errors;
    # an explicit membership test is equivalent and safer.  The original
    # also computed an unused transformed fullPath here -- removed.)
    if relaPath in self.zippedFileInfo:
        return
    # Flush the current batch once it holds too many entries.
    if self.fileCnt > MAX_FILE_CNT_IN_INFO_FILE:
        self.encInfoZip(pendingCollection)
    processingObj["parentEncZip"] = self.targetPath.replace(".zip", ".enc")
    self.zippedFileInfo[relaPath] = processingObj.getItemInfo()
    cl('return from store')
示例15: run
def run(self):
    """Enumerate self.rootFolder and queue one item per directory found.

    If rootFolder is not a directory it is queued as a single item (after
    black-list filtering).  Otherwise every sub-directory in the walk is
    queued with its mtime as timestamp.  Stops early when self.quit_flag
    is set.

    NOTE(review): "filter" here shadows the builtin -- presumably a
    project-level black-list matcher; confirm its semantics.
    """
    print 'Start scanning'
    if not os.path.isdir(self.rootFolder):
        print "not a folder"
        if filter(self.rootFolder, self.blackList):
            return
        paramDict = {"fullPath": self.rootFolder, "timestamp": os.stat(self.rootFolder)[ST_MTIME],
                     "monitoringPath": self.rootFolder}
        self.addItem(paramDict)
    else:
        for root, dirs, files in os.walk(self.rootFolder):
            #Break if quit called
            if self.quit_flag:
                break
            #cl("remaining:", dirs)
            #Process directories under this root
            for j in dirs:
                info(j)
                if filter(j, self.blackList):
                    info("ignoring: ", j, "\n")
                    continue
                fullPath = transform.transformDirToInternal(os.path.join(root, j))
                paramDict = {"fullPath": fullPath, "timestamp": os.stat(fullPath)[ST_MTIME],
                             "monitoringPath": self.rootFolder}
                self.addItem(paramDict)
    print "process complete, quitting thread"
示例16: __init__
def __init__(self, srcRoot, storageRoot, stateStoragePath = 'd:/state.txt'):
    """Set up a zip storage archiving srcRoot into storageRoot.

    Persisted state is loaded from stateStoragePath (JSON); when the state
    file is missing the config starts empty.  No archive file is opened
    until the first store() call.
    """
    self.srcRoot = transform.transformDirToInternal(srcRoot)
    self.storageRoot = transform.transformDirToInternal(storageRoot)
    self.stateStoragePath = stateStoragePath
    try:
        with open(self.stateStoragePath, 'r') as stateFile:
            self.config = json.load(stateFile)
    except IOError:
        # No saved state yet -- start fresh.
        self.config = {}
    # Current-archive bookkeeping: nothing open yet.
    self.curArchivedSize = 0
    self.curArchive = None
示例17: __init__
def __init__(self, srcRoot, storageRoot, stateStoragePath = 'd:/state.txt',
             tmpStorageRoot = 'd:/tmp/removeAfterComplete', decCopier = None):
    """Zip storage variant that decrypts archives through decCopier.

    decCopier: copier used to decrypt; defaults to a fresh
    arc4DecSimpleCopier('defaultPass') per instance.

    Bug fix: the original default constructed ONE arc4DecSimpleCopier at
    import time that every instance then shared (classic evaluated-once
    default argument).  A fresh copier is now created per instance when
    the caller does not supply one; explicit decCopier arguments behave
    exactly as before.
    """
    if decCopier is None:
        decCopier = encryptionStorageBase.arc4DecSimpleCopier('defaultPass')
    misc.ensureDir(tmpStorageRoot)
    misc.ensureDir(storageRoot)
    zipStorage.zipStorage.__init__(self, srcRoot, storageRoot, stateStoragePath)
    self.tmpStorageRoot = transform.transformDirToInternal(tmpStorageRoot)
    self.decCopier = decCopier
示例18: initParam
def initParam(self, zipDir, folderDir, workingDir, encryptionPass, direction):
    """Prepare both endpoints of an encrypted-zip <-> folder sync.

    zipDir:         directory holding the encrypted zip storage
    folderDir:      plain folder storage directory
    workingDir:     scratch dir for the state file, backups and extraction
    encryptionPass: passphrase for the encrypted zip storage
    direction:      "extract" syncs zip -> folder; any other value syncs
                    folder -> zip
    """
    #################################
    #Make dir if not exist
    #################################
    misc.ensureDir(zipDir)
    misc.ensureDir(workingDir)
    misc.ensureDir(folderDir)
    self.configPath = os.path.join(workingDir, 'workingState.txt')
    self.backupPath = os.path.join(workingDir, 'backup')
    misc.ensureDir(self.backupPath)
    self.tmpStorageRoot = transform.transformDirToInternal(os.path.join(workingDir, 'working'))
    # One persistent config file carries the sync state of both sides.
    self.config = configDict.configFileDict(self.configPath, {"zipStorageState":{}, "folderState":{}})
    #################################
    #Create source storage
    #################################
    self.storage1 = encZipStorage.encZipStorage(self.config["zipStorageState"],
        self.tmpStorageRoot, zipDir, encryptionPass)
    #################################
    #Create target storage
    #################################
    self.storage2 = folderStorage.folderStorage(self.config["folderState"],
        folderDir, self.backupPath)
    # Direction only decides which storage plays source vs destination.
    if direction == "extract":
        self.srcStorage = self.storage1
        self.dstStorage = self.storage2
    else:
        self.srcStorage = self.storage2
        self.dstStorage = self.storage1
示例19: subClassRun
def subClassRun(self, paramDict):
    """Rescan self.rootFolder and refresh each file's uuid in the target
    collection, then notify all listeners.

    Non-realtime monitor pass: walks the whole tree, computes the object
    url and current uuid for every file, skips files that vanished or are
    unchanged, and updates the collection entry otherwise.
    """
    ###############################################
    #Scan for existing files
    ###############################################
    collection = self.objDb.getCollection(self.targetCollectionId)
    cl('start scanning')
    for i in os.walk(self.rootFolder):
        #cl(i)
        for j in i[2]:
            # Progress trace every 1000 items.
            if (self.addedItemCnt % 1000) == 0:
                cl("processing item cnt:", self.addedItemCnt)
            self.addedItemCnt += 1
            fullPath = transform.transformDirToInternal(os.path.join(i[0], j))
            #print '---------------------real adding item'
            #Update the item info for the item
            ncl('before fs obj base')
            itemUrl = objectDatabase.fsObjBase(fullPath).getObjUrl()
            ncl('before get fs obj')
            newObjUuid = self.objDb.getFsObjUuid(itemUrl)
            if newObjUuid is None:
                # File disappeared between walk and lookup -- skip it.
                cl("item deleted, do not add it")
                continue
            ncl('before update obj uuid')
            # Dead code kept from the original:
            '''
            collection.updateObjUuidIfNeeded(itemUrl, newObjUuid)
            '''
            if collection.isSame(itemUrl, newObjUuid):
                ncl("no updates needed", itemUrl, newObjUuid)
                continue
            collection.updateObjUuidRaw(itemUrl, newObjUuid)
            ncl('new item added', itemUrl)
    cl("notifying listener")
    self.notifyAll()
示例20: internal_get_thumb
def internal_get_thumb(path, targetDir, mime_type = None):
    '''
    path: Full Path. The path of the file whose thumbnail will be generated
    targetDir: Directory Path. The target directory where the generated thumbnail will be put in.
    mime_type: optional mime type hint forwarded to the picture generator.
    Return: the thumbnail fullPath in internal form, or None when no
    thumbnail could be produced.

    Strategy: .exe files go through the app-icon thumbnailer; everything
    else is tried as a picture first, falling back to the video
    thumbnailer for known video extensions.  All generator failures are
    deliberately swallowed (best effort) and reported as None.
    '''
    newPath = None
    ext = path.split('.')[-1].lower()
    if ext in ['exe']:
        try:
            newPath = appThumb.genAppThumb(path, targetDir)
        except:
            return None
    else:
        try:
            newPath = picThumbGenerator.genPicThumb(path, targetDir, mime_type)
        except picThumbGenerator.pictureFormatNotSupported:
            if ext in g_video_file_ext_list:
                try:#if True:
                    newPath = ffmpegThumb.genVideoThumb(path, targetDir)
                    #return "complete transform"
                    #return newPath
                except:
                    pass
            else:
                pass
    if newPath is None:
        return None
    # NOTE(review): called without the transform. prefix used elsewhere --
    # presumably this module imports transformDirToInternal directly; confirm.
    return transformDirToInternal(newPath)
注：本文中的wwjufsdatabase.libs.utils.transform.transformDirToInternal函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台，相关代码片段筛选自各路编程大神贡献的开源项目，源码版权归原作者所有，传播和使用请参考对应项目的License；未经允许，请勿转载。
请发表评论