Python plotting.plot_glass_brain Function Code Examples


This article collects typical usage examples of the Python function nilearn.plotting.plot_glass_brain. If you have been wondering what plot_glass_brain does, how to call it, or what it looks like in real code, the hand-picked examples below should help.



The following presents 20 code examples of plot_glass_brain, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps the system recommend better Python code examples.
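Before diving into the collected examples, here is a minimal, self-contained sketch of a typical plot_glass_brain call. The file name and threshold value are illustrative placeholders and are not taken from any of the examples below.

# Minimal sketch of a plot_glass_brain call; 'stat_map.nii.gz' and the
# threshold value are placeholder assumptions, not from the examples below.
from nilearn import plotting
import matplotlib.pyplot as plt

display = plotting.plot_glass_brain(
    'stat_map.nii.gz',       # path to (or an in-memory) 3D NIfTI statistical image
    threshold=3.0,           # hide voxels whose absolute value is below 3
    colorbar=True,
    display_mode='ortho',    # sagittal, coronal and axial projections
    title='plot_glass_brain')
plt.show()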

Example 1: plot_bw_coeffs

def plot_bw_coeffs(coeffs, affine, title, base_brightness=.7, cmap=None, output_file=None, black_bg=False):
    def isstr(s): return isinstance(s, str)

    def default_cmap():
        invert_if_black_bg = lambda v: (1 - v) if black_bg else v
        base_brightness_local = invert_if_black_bg(base_brightness)
        end_brightness = invert_if_black_bg(0)
        avg = np.average([base_brightness_local, end_brightness])
        c_range = ((0, base_brightness_local, base_brightness_local),
                   (.33, base_brightness_local, avg),
                   (.67, avg, end_brightness),
                   (1, end_brightness, end_brightness))
        c_dict = {r: c_range for r in ['red', 'green', 'blue']}
        cmap_name = 'bright_bw'
        cmap = LinearSegmentedColormap(cmap_name, c_dict)
        plt.register_cmap(cmap=cmap)
        return cmap

    cmap = plt.get_cmap(cmap) if isstr(cmap) else default_cmap() if cmap is None else cmap

    plot_glass_brain(nib.Nifti1Image(coeffs, affine=affine),
                     title=title,
                     black_bg=black_bg,
                     colorbar=True,
                     output_file=output_file,
                     cmap=cmap,
                     alpha=.15)
Developer: bonilhamusclab-projects, Project: epi_prediction, Lines: 27, Source: epi_prediction.py
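A hypothetical invocation of plot_bw_coeffs is sketched below; the toy coefficient array, affine, and output path are assumptions for illustration and do not come from the epi_prediction project. The module-level imports the function relies on are repeated so the sketch is self-contained.

# Hypothetical usage sketch for plot_bw_coeffs; the data, affine and output
# path are made up for illustration.
import numpy as np
import nibabel as nib
import matplotlib.pyplot as plt
from matplotlib.colors import LinearSegmentedColormap
from nilearn.plotting import plot_glass_brain

coeffs = np.abs(np.random.randn(10, 12, 10))   # toy non-negative weight map
affine = np.diag([18.0, 18.0, 18.0, 1.0])      # ~18 mm voxels spanning roughly MNI space
affine[:3, 3] = (-90.0, -108.0, -90.0)         # place the volume's corner near MNI (-90, -108, -90)

plot_bw_coeffs(coeffs, affine, title='toy coefficients',
               output_file='toy_coeffs_glass.png', black_bg=False)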


Example 2: __main__

import numpy as np
import matplotlib.pyplot as plt
from nilearn import image, plotting


def __main__():
    volume = image.index_img("E:\\Users\\Niall\\Documents\\Computer Science\\FinalYearProject\\data\\ds105_raw\\ds105\\sub001\\BOLD\\task001_run001\\bold.nii.gz", 0)
    smoothed_img = image.smooth_img(volume, fwhm=5)

    # print("Read the images");

    plotting.plot_glass_brain(volume, title='plot_glass_brain',
                              black_bg=True, display_mode='xz')
    plotting.plot_glass_brain(volume, title='plot_glass_brain',
                              black_bg=False, display_mode='xz')

    plt.show()

    # print("Finished");



    # generate some numbers
    t = np.linspace(1, 10, 2000)  # 2000 points between 1 and 10

    #plot the graph
    plt.plot(t, np.cos(t))
    plt.ylabel('Subject Response')
    plt.show()
Developer: niallmcs, Project: brainProject, Lines: 25, Source: nilearn_test.py


Example 3: plot_activation_by_ID

def plot_activation_by_ID(identifier):
    localizer_dataset = datasets.fetch_localizer_contrasts(
        [identifier],
        n_subjects=2,
        get_tmaps=True)
    localizer_tmap_filename = localizer_dataset.tmaps[1]
    plotting.plot_glass_brain(localizer_tmap_filename, threshold=3)
Developer: trichner, Project: artibrain, Lines: 7, Source: brain_activation.py


Example 4: plot_stripes_in_ic

def plot_stripes_in_ic(ic_img, melmix_ic_power, melmix_ic_timecourse, nifti_img, ic,):
    fig = plt.figure()
    fig.subplots_adjust(hspace=0.45, wspace=0.1)     
    
    stripe_magnitude_per_slice = sorted([(find_biggest_stripe_in_slice(ic_img[i]), i) 
                        for i in range(ic_img.shape[0])])    
    i = 1
    for stripe_magnitude, slice_index in stripe_magnitude_per_slice[:4]:
        slice_2d = ic_img[slice_index]        
        dip_sizes, col_means, peaks, valleys, median_filtered_picture = columnwise_signal_dips(slice_2d)                
        stripe_mask = get_stripe_mask(col_means, dip_sizes, valleys)*10
        dips = get_dips(dip_sizes, col_means, valleys)
        
        ax = fig.add_subplot(4, 2, i+2)
        ax.get_yaxis().set_visible(False)
        ax.imshow(median_filtered_picture)
        ax.plot(34 - dips, color='white')
        ax.plot(34-stripe_mask, color='red')        
        plt.title('ic:%s slice:%s magnitude:%s' %(
                ic+1, slice_index, abs(np.round(np.min(dip_sizes),2))))        
        i += 1
    
    ax2 = fig.add_subplot(4, 2, 7)
    ax2.plot(melmix_ic_power)
    plt.title('Powerspectrum')
    ax3 = fig.add_subplot(4, 2, 8)
    ax3.plot(melmix_ic_timecourse)
    plt.title('Timecourse')
    
    ax0 = fig.add_subplot(4,1,1)
    plot_glass_brain(nifti_img, title=ic+1, axes=ax0)
    return fig
Developer: dinga92, Project: stripe_cleaning_scripts, Lines: 32, Source: ica_stripes_plotting.py


Example 5: openBold

    def openBold(self):
        file = self.onOpen([('NIFTI files', '*.nii.gz'), ('All files', '*')])
        print("bold file: " + file)

        bold = image.index_img(file, 0)

        plotting.plot_glass_brain(bold, title='glass_brain',
                                  black_bg=True, display_mode='ortho')
        plt.show()
Developer: niallmcs, Project: brainProject, Lines: 9, Source: gui_demo.py


Example 6: plot_coeffs

def plot_coeffs(coeffs, affine, neg_disp=.8, pos_disp=.8, **kwargs):

    def default_cmap():
        max_neg_coeff = np.abs(np.min(coeffs))
        max_pos_coeff = np.max(coeffs)
        max_coeff = np.max((max_neg_coeff, max_pos_coeff))

        dev = 0.5

        neg_dev = dev * max_neg_coeff/max_coeff
        pos_dev = dev * max_pos_coeff/max_coeff

        zero = 0.5
        max_neg = zero - neg_dev
        max_pos = zero + pos_dev

        na_clr = .5
        na_start = (0.0, na_clr, na_clr)
        na_end = (1.0, na_clr, na_clr)

        blue_red_bp_map = {
            'red': (
                na_start,
                (max_neg, na_clr, 0.0),
                (zero, 0.0, 1.0),
                (max_pos, 1.0, na_clr),
                na_end
            ),
            'blue': (
                na_start,
                (max_neg, na_clr, 0.0),
                (zero - neg_disp*neg_dev, 1.0, 1.0),
                (zero, 1.0, 0.0),
                (max_pos, 0.0, na_clr),
                na_end
            ),
            'green': (
                na_start,
                (max_neg, na_clr, 1.0),
                (zero - neg_disp*neg_dev, 1.0, 1.0),
                (zero, 0.0, 0.0),
                (zero + pos_disp*pos_dev, pos_disp, pos_disp),
                (max_pos, 1.0, na_clr),
                na_end
            )
            }

        name = 'BlueRedBiPolar'
        return LinearSegmentedColormap(name, blue_red_bp_map)

    img = nib.Nifti1Image(coeffs, affine=affine)
    kwargs['cmap'] = default_cmap()
    plot_glass_brain(img, **kwargs)
Developer: bonilhamusclab-projects, Project: epi_prediction, Lines: 53, Source: epi_prediction.py


Example 7: generate_glassbrain_image

def generate_glassbrain_image(image_pk):
    from neurovault.apps.statmaps.models import Image
    import neurovault
    import matplotlib as mpl
    mpl.rcParams['savefig.format'] = 'jpg'
    my_dpi = 50
    fig = plt.figure(figsize=(330.0/my_dpi, 130.0/my_dpi), dpi=my_dpi)
    
    img = Image.objects.get(pk=image_pk)    
    f = BytesIO()
    try:
        glass_brain = plot_glass_brain(img.file.path, figure=fig)
        glass_brain.savefig(f, dpi=my_dpi)
    except:
        # Glass brains that fail to render fall back to a dummy placeholder image
        this_path = os.path.abspath(os.path.dirname(__file__))
        f = open(os.path.abspath(os.path.join(this_path,
                                              "static", "images", "glass_brain_empty.jpg")), "rb")
        raise
    finally:
        plt.close('all')
        f.seek(0)
        content_file = ContentFile(f.read())
        img.thumbnail.save("glass_brain_%s.jpg" % img.pk, content_file)
        img.save()
Developer: rwblair, Project: NeuroVault, Lines: 25, Source: tasks.py


Example 8: make_glassbrain_image

def make_glassbrain_image(nifti_file,png_img_file=None):
    """Make glassbrain image, optional save image to png file (not vector)"""
    nifti_file = str(nifti_file)
    glass_brain = plot_glass_brain(nifti_file)
    if png_img_file:    
        glass_brain.savefig(png_img_file)
    plt.close('all')
    return glass_brain
Developer: vsoch, Project: pybraincompare, Lines: 8, Source: image.py


Example 9: generate_images

def generate_images(components_img, n_components, images_dir, glass=False):
    # Remove existing images
    if os.path.exists(images_dir):
        shutil.rmtree(images_dir)
    os.makedirs(images_dir)
    output_filenames = [osp.join(images_dir, 'IC_{}.png'.format(i))
                        for i in range(n_components)]

    for i, output_file in enumerate(output_filenames):
        plot_stat_map(nibabel.Nifti1Image(components_img.get_data()[..., i],
                                          components_img.get_affine()),
                      display_mode="z", title="IC %d" % i, cut_coords=7,
                      colorbar=False, output_file=output_file)
    if glass:
        output_filenames = [osp.join(images_dir, 'glass_IC_{}.png'.format(i))
                            for i in range(n_components)]
        for i, output_file in enumerate(output_filenames):
            plot_glass_brain(nibabel.Nifti1Image(components_img.get_data()[..., i],
                                                 components_img.get_affine()),
                             display_mode="ortho", title="IC %d" % i,
                             output_file=output_file)
Developer: ajrichardson, Project: nilearn_ui, Lines: 22, Source: run_canica.py


Example 10: glassbrain_allcontrasts

def glassbrain_allcontrasts(path, title, mode='uncorrected',
    cluster_threshold=50):
    ''' For each SPM contrast from a Nipype workflow (`path` points to the base
    directory), generates a glass brain figure with the corresponding
    thresholded map.

    `mode` can be either 'uncorrected' (p<0.001, T>3.1, F>4.69)
                      or 'FWE' (p<0.05, T>4.54, F>8.11).
    `title` is the title displayed on the plot.'''

    nodes = [pickle.load(gzip.open(osp.join(path, e, '_node.pklz'), 'rb'))
        for e in ['modeldesign', 'estimatemodel','estimatecontrasts']]
    _, _, node = nodes

    spm_mat_file = osp.join(node.output_dir(), 'SPM.mat')
    for i in range(1, len(node.inputs.contrasts)+1):
        output_dir = osp.join(path, node._id)

        img = glob(osp.join(output_dir, 'spm*_00%02d.nii'%i))[0]
        contrast_type = osp.split(img)[-1][3]
        print(img, contrast_type)
        contrast_name = node.inputs.contrasts[i-1][0]

        thresholded_map1, threshold1 = map_threshold(img, threshold=0.001,
            cluster_threshold=cluster_threshold)
        if mode == 'uncorrected':
            threshold1 = 3.106880 if contrast_type == 'T' else 4.69
            pval_thresh = 0.001
        elif mode == 'FWE':
            threshold1 = 4.5429 if contrast_type == 'T' else 8.1101
            pval_thresh = 0.05

        plotting.plot_glass_brain(thresholded_map1, colorbar=True, black_bg=True,
            display_mode='ortho', threshold=threshold1,
            title='(%s) %s - %s>%.02f - p<%s (%s)'
            %(title, contrast_name, contrast_type, threshold1, pval_thresh,
            mode))
Developer: xgrg, Project: alfa, Lines: 37, Source: nilearn-helper.py
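As a follow-up, a hypothetical call to glassbrain_allcontrasts might look like the following; the workflow path and title are placeholders, not paths from the alfa project.

# Hypothetical usage of glassbrain_allcontrasts; the path and title are placeholders.
glassbrain_allcontrasts('/data/nipype/first_level/sub-01', 'sub-01',
                        mode='FWE', cluster_threshold=50)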


Example 11: create_glassbrain_image

def create_glassbrain_image(self, mlmodel_id):
    from nilearn.plotting import plot_glass_brain
    import pylab as plt

    model = MLModel.query.get(mlmodel_id)
    if not model:
        return

    my_dpi = 50
    fig = plt.figure(figsize=(330.0/my_dpi, 130.0/my_dpi), dpi=my_dpi)

    output_dir = model_dir(mlmodel_id)
    stat_map_img = os.path.join(output_dir, model.output_data['weightmap'])

    glass_brain = plot_glass_brain(stat_map_img, figure=fig)

    glass_brain_filename = 'glassbrain.png'
    glass_brain_path = os.path.join(output_dir, glass_brain_filename)
    glass_brain.savefig(glass_brain_path, dpi=my_dpi)

    model.output_data = dict(glassbrain=glass_brain_filename,
                             **model.output_data)
    db.session.commit()
Developer: neurolearn, Project: neurolearn-web, Lines: 23, Source: tasks.py


Example 12: zip

import glob
import numpy as np
import featurespace_fun as fsf
import matplotlib.pyplot as plt
from nilearn.masking import apply_mask
from nilearn.input_data import MultiNiftiMasker
from scipy.stats import norm
from nilearn.plotting import plot_glass_brain
from scipy.stats import ttest_1samp
import sys
from nibabel import load

mask = 'brainmask_group_template.nii.gz'

maps = [sorted(glob.glob('MaThe/maps/mni/model_depcor_{}*subj_*'.format(model))) for model in ['lda', 'speaker', 'emotions']]

valid_subjs = [mp.split('_')[-2] for mp in maps[0]]

for subj, ft_maps in zip(sorted(valid_subjs), zip(*maps)):
    display = plot_glass_brain(None, plot_abs=False, threshold=0.001)
    for ind_ft_map, color in zip(ft_maps, ['r', 'b', 'g']):
        level_thr = np.percentile(apply_mask('MaThe/avg_maps/'+ind_ft_map.split('/')[-1], mask), 99.9)
        display.add_contours(ind_ft_map, colors=[color], levels=[level_thr], alpha=0.6, linewidths=3.0)
    display.savefig('contours_tvals_subj_{}.png'.format(subj))
    plt.close()

Developer: mjboos, Project: encfg, Lines: 25, Source: make_contour_plots_tvals.py


Example 13: fetch_localizer_contrasts

data = fetch_localizer_contrasts(["left vs right button press"], n_subjects,
                                 get_tmaps=True)

###########################################################################
# Display subject t_maps
# ----------------------
# We plot a grid with all the subjects t-maps thresholded at t = 2 for
# simple visualization purposes. The button press effect is visible among
# all subjects
from nilearn import plotting
import matplotlib.pyplot as plt
subjects = [subject_data[0] for subject_data in data['ext_vars']]
fig, axes = plt.subplots(nrows=4, ncols=4)
for cidx, tmap in enumerate(data['tmaps']):
    plotting.plot_glass_brain(tmap, colorbar=False, threshold=2.0,
                              title=subjects[cidx],
                              axes=axes[int(cidx / 4), int(cidx % 4)],
                              plot_abs=False, display_mode='z')
fig.suptitle('subjects t_map left-right button press')
plt.show()

############################################################################
# Estimate second level model
# ---------------------------
# We define the input maps and the design matrix for the second level model
# and fit it.
import pandas as pd
second_level_input = data['cmaps']
design_matrix = pd.DataFrame([1] * len(second_level_input),
                             columns=['intercept'])

############################################################################
Developer: alpinho, Project: nistats, Lines: 32, Source: plot_second_level_one_sample_test.py


Example 14: ROIs

    get_anats=True,
    get_tmaps=True)
localizer_anat_filename = localizer_dataset.anats[1]
localizer_cmap_filename = localizer_dataset.cmaps[1]
localizer_tmap_filename = localizer_dataset.tmaps[1]

###############################################################################
# demo the different plotting functions

# Plotting statistical maps
plotting.plot_stat_map(localizer_cmap_filename, bg_img=localizer_anat_filename,
                       threshold=3, title="plot_stat_map",
                       cut_coords=(36, -27, 66))

# Plotting glass brain
plotting.plot_glass_brain(localizer_tmap_filename, title='plot_glass_brain',
                          threshold=3)

# Plotting anatomical maps
plotting.plot_anat(haxby_anat_filename, title="plot_anat")

# Plotting ROIs (here the mask)
plotting.plot_roi(haxby_mask_filename, bg_img=haxby_anat_filename,
                  title="plot_roi")

# Plotting EPI haxby
mean_haxby_img = image.mean_img(haxby_func_filename)
plotting.plot_epi(mean_haxby_img, title="plot_epi")

import matplotlib.pyplot as plt
plt.show()
Developer: schwarty, Project: nilearn, Lines: 31, Source: plot_demo_plotting.py


Example 15: ROIs

plotting.plot_stat_map(localizer.cmaps[3], bg_img=localizer.anats[3],
                       threshold=3, title="plot_stat_map",
                       cut_coords=(36, -27, 66))

# Plotting anatomical maps
plotting.plot_anat(haxby.anat[0], title="plot_anat")

# Plotting ROIs (here the mask)
plotting.plot_roi(haxby.mask_vt[0], bg_img=haxby.anat[0], title="plot_roi")

# Plotting EPI haxby
mean_haxby_img = image.mean_img(haxby.func[0])
plotting.plot_epi(mean_haxby_img, title="plot_epi")

# Plotting glass brain
plotting.plot_glass_brain(localizer.tmaps[3], title='plot_glass_brain',
                          threshold=3)

plotting.plot_glass_brain(localizer.tmaps[3], title='plot_glass_brain',
                          black_bg=True, display_mode='xz', threshold=3)

###############################################################################
# demo the different display_mode

plotting.plot_stat_map(localizer.cmaps[3], display_mode='ortho',
                       cut_coords=(36, -27, 60),
                       title="display_mode='ortho', cut_coords=(36, -27, 60)")

plotting.plot_stat_map(localizer.cmaps[3], display_mode='z', cut_coords=5,
                       title="display_mode='z', cut_coords=5")

plotting.plot_stat_map(localizer.cmaps[3], display_mode='x', cut_coords=(-36, 36),
Developer: andreas-koukorinis, Project: gaelvaroquaux.github.io, Lines: 32, Source: plot_demo_plotting.py


Example 16: background

###############################################################################
# Retrieve data from Internet
# ---------------------------

from nilearn import datasets

motor_images = datasets.fetch_neurovault_motor_task()
stat_img = motor_images.images[0]

###############################################################################
# Glass brain plotting: whole brain sagittal cuts
# -----------------------------------------------

from nilearn import plotting

plotting.plot_glass_brain(stat_img, threshold=3)

###############################################################################
# Glass brain plotting: black background
# -------------------------------------
# On a black background (option "black_bg"), and with only the x and
# the z view (option "display_mode").
plotting.plot_glass_brain(
    stat_img, title='plot_glass_brain',
    black_bg=True, display_mode='xz', threshold=3)

###############################################################################
# Glass brain plotting: Hemispheric sagittal cuts
# -----------------------------------------------
plotting.plot_glass_brain(stat_img,
                          title='plot_glass_brain with display_mode="lyrz"',
Developer: bthirion, Project: nilearn, Lines: 31, Source: plot_demo_glass_brain.py


Example 17:

"""
Glass brain plotting in nilearn
===============================

See :ref:`plotting` for more plotting functionalities.
"""

from nilearn import datasets
from nilearn import plotting

###############################################################################
# Retrieve the data

localizer_dataset = datasets.fetch_localizer_contrasts(
    ["left vs right button press"],
    n_subjects=2,
    get_tmaps=True)
localizer_tmap_filename = localizer_dataset.tmaps[1]

###############################################################################
# demo glass brain plotting

plotting.plot_glass_brain(localizer_tmap_filename, title='plot_glass_brain',
                          threshold=3)

plotting.plot_glass_brain(localizer_tmap_filename, title='plot_glass_brain',
                          black_bg=True, display_mode='xz', threshold=3)

import matplotlib.pyplot as plt
plt.show()
Developer: MartinPerez, Project: nilearn.github.io, Lines: 30, Source: plot_demo_plotting_glass_brain.py


Example 18: plot_design_matrix

    columns=['vertical vs horizontal'] + subjects)

############################################################################
# plot the design_matrix
from nistats.reporting import plot_design_matrix
plot_design_matrix(design_matrix)

############################################################################
# formally specify the analysis model and fit it
from nistats.second_level_model import SecondLevelModel
second_level_model = SecondLevelModel().fit(
    second_level_input, design_matrix=design_matrix)

##########################################################################
# Estimating the contrast is very simple. We can just provide the column
# name of the design matrix.
z_map = second_level_model.compute_contrast('vertical vs horizontal',
                                            output_type='z_score')

###########################################################################
# We threshold the second level contrast and plot it
threshold = 3.1  # corresponds to p < .001, uncorrected
display = plotting.plot_glass_brain(
    z_map, threshold=threshold, colorbar=True, plot_abs=False,
    title='vertical vs horizontal checkerboard (unc p<0.001)')

###########################################################################
# Unsurprisingly, we see activity in the primary visual cortex, both positive and negative.

plotting.show()
Developer: alpinho, Project: nistats, Lines: 30, Source: plot_second_level_two_sample_test.py


Example 19: ROIs

# uncomment this to open the plot in a web browser:
# view.open_in_browser()

##############################################################################
# In a Jupyter notebook, if ``view`` is the output of a cell, it will
# be displayed below the cell

view

###############################################################################
# Plotting statistical maps in a glass brain with function `plot_glass_brain`
# ---------------------------------------------------------------------------
#
# Now, the t-map image is mapped on glass brain representation where glass
# brain is always a fixed background template
plotting.plot_glass_brain(stat_img, title='plot_glass_brain',
                          threshold=3)

###############################################################################
# Plotting anatomical images with function `plot_anat`
# -----------------------------------------------------
#
# Visualizing anatomical image of haxby dataset
plotting.plot_anat(haxby_anat_filename, title="plot_anat")

###############################################################################
# Plotting ROIs (here the mask) with function `plot_roi`
# -------------------------------------------------------
#
# Visualizing the ventral temporal region image from the haxby dataset overlaid on the
# subject-specific anatomical image, with coordinates positioned automatically on the
# region of interest (ROI)
Developer: jeromedockes, Project: nilearn, Lines: 32, Source: plot_demo_plotting.py


Example 20: zip

p001_unc = norm.isf(0.001)

############################################################################
# Prepare figure for concurrent plot of individual maps
from nilearn import plotting
import matplotlib.pyplot as plt

fig, axes = plt.subplots(nrows=2, ncols=5, figsize=(8, 4.5))
model_and_args = zip(models, models_run_imgs, models_events, models_confounds)
for midx, (model, imgs, events, confounds) in enumerate(model_and_args):
    # fit the GLM
    model.fit(imgs, events, confounds)
    # compute the contrast of interest
    zmap = model.compute_contrast('language-string')
    plotting.plot_glass_brain(zmap, colorbar=False, threshold=p001_unc,
                              title=('sub-' + model.subject_label),
                              axes=axes[int(midx / 5), int(midx % 5)],
                              plot_abs=False, display_mode='x')
fig.suptitle('subjects z_map language network (unc p<0.001)')
plotting.show()

#########################################################################
# Second level model estimation
# -----------------------------
# We just have to provide the list of fitted FirstLevelModel objects
# to the SecondLevelModel object for estimation. We can do this because
# all subjects share a similar design matrix (same variables reflected in
# column names)
from nistats.second_level_model import SecondLevelModel
second_level_input = models

#########################################################################
Developer: alpinho, Project: nistats, Lines: 32, Source: plot_bids_analysis.py
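The excerpt stops right after collecting the fitted first-level models. A plausible continuation, sketched from the nistats second-level API described in the comments above (this part is an assumption and is not shown in the excerpt), would fit the group model and plot the group map:

# Sketch of a plausible continuation (assumption; not part of the excerpt above).
second_level_model = SecondLevelModel().fit(second_level_input)
# group-level z-map for the same 'language-string' first-level contrast
zmap_group = second_level_model.compute_contrast(
    first_level_contrast='language-string')
plotting.plot_glass_brain(zmap_group, colorbar=True, threshold=p001_unc,
                          plot_abs=False, display_mode='x',
                          title='group language network (unc p<0.001)')
plotting.show()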



Note: The nilearn.plotting.plot_glass_brain examples in this article were compiled by 纯净天空 from GitHub/MSDocs and other source-code and documentation platforms. The snippets are taken from open-source projects contributed by their respective authors, and copyright remains with them; for distribution and use, please refer to each project's License. Do not reproduce without permission.

