收藏本站 劰载中...网站公告 | 吾爱海洋论坛交流QQ群:835383472

Datawhale 智慧海洋建设-Task2 数据分析

[复制链接]
+ |# c$ Q; b1 O8 Q4 B

此部分为智慧海洋建设竞赛的数据分析模块,通过数据分析,可以熟悉数据,为后面的特征工程做准备,欢迎大家后续多多交流。

赛题:智慧海洋建设

数据分析的目的:

EDA的主要价值在于熟悉整个数据集的基本情况(缺失值、异常值),来确定所获得数据集可以用于接下来的机器学习或者深度学习使用。了解特征之间的相关性、分布,以及特征与预测值之间的关系。为进行特征工程提供理论依据。

项目地址:https://github.com/datawhalechina/team-learning-data-mining/tree/master/wisdomOcean比赛地址:https://tianchi.aliyun.com/competition/entrance/231768/introduction?spm=5176.12281957.1004.8.4ac63eafE1rwsY

8 a1 z2 `7 b, W7 V7 k! t* o

2.1 学习目标

学习如何对数据集整体概况进行分析,包括数据集的基本情况(缺失值、异常值)学习了解变量之间的相互关系、变量与预测值之间的存在关系。完成相应学习打卡任务

2.2 内容介绍

数据总体了解读取数据集并了解数据集的大小,原始特征维度;通过info了解数据类型;粗略查看数据集中各特征的基本统计量缺失值和唯一值查看数据缺失值情况查看唯一值情况

数据特性和特征分布

4 i y1 p8 |( z7 N5 q& J

三类渔船轨迹的可视化坐标序列可视化三类渔船速度和方向序列可视化三类渔船速度和方向的数据分布

作业一:剔除异常点后画图

import pandas as pd
import geopandas as gpd
from pyproj import Proj
from keplergl import KeplerGl
from tqdm import tqdm
import os
import matplotlib.pyplot as plt
import shapely
import numpy as np
from datetime import datetime
import warnings

# Silence library warnings so the notebook output stays readable.
warnings.filterwarnings('ignore')

plt.rcParams['font.sans-serif'] = ['SimSun']  # Default to SimSun so Chinese labels render correctly.
plt.rcParams['axes.unicode_minus'] = False    # Render the minus sign correctly when saving figures.

/ ]6 z k4 a% D0 Z$ F) U5 j0 D1 F

#获取文件夹中的数据

" D: E) K8 }1 ?

# Load every per-ship csv file under ``file_path`` into one DataFrame.
def get_data(file_path, model):
    """Read all trajectory files in a directory and concatenate them.

    Parameters
    ----------
    file_path : str
        Directory containing one csv file per ship (first row is a header).
    model : str
        ``'train'`` (files already carry a type column) or ``'test'``
        (a placeholder ``'unknown'`` type column is appended).

    Returns
    -------
    pandas.DataFrame
        Columns: ID, lat, lon, speed, direction, time, type.
    """
    assert model in ['train', 'test'], '{} Not Support this type of file'.format(model)
    paths = os.listdir(file_path)
    tmp = []
    for t in tqdm(range(len(paths))):
        p = paths[t]
        with open('{}/{}'.format(file_path, p), encoding='utf-8') as f:
            next(f)  # skip the header line of each file
            for line in f.readlines():
                tmp.append(line.strip().split(','))
    tmp_df = pd.DataFrame(tmp)
    if model == 'train':
        tmp_df.columns = ['ID', 'lat', 'lon', 'speed', 'direction', 'time', 'type']
    else:
        # Test files have no label column: append a placeholder before renaming.
        tmp_df['type'] = 'unknown'
        tmp_df.columns = ['ID', 'lat', 'lon', 'speed', 'direction', 'time', 'type']
    tmp_df['lat'] = tmp_df['lat'].astype(float)
    tmp_df['lon'] = tmp_df['lon'].astype(float)
    tmp_df['speed'] = tmp_df['speed'].astype(float)
    # If this cast fails, try upgrading pandas (original author's note).
    tmp_df['direction'] = tmp_df['direction'].astype(int)
    return tmp_df

* c7 o" C# p6 {# m

# 平面坐标转经纬度,供初赛数据使用

# Q$ `# P$ k/ \2 w& q2 ~# f' L

# 选择标准为NAD83 / California zone 6 (ftUS) (EPSG:2230),查询链接:CS2CS - Transform Coordinates On-line - MyGeodata Cloud

5 C9 k2 B( G6 e4 N2 T6 g5 m

def transform_xy2lonlat(df):
    """Convert plane coordinates to lon/lat in place (for the preliminary-round data).

    The projection is NAD83 / California zone 6 (ftUS), EPSG:2230.
    Note that the raw frame stores x in the 'lat' column and y in 'lon';
    both columns are overwritten with the inverse-projected degrees.
    """
    x = df['lat'].values
    y = df['lon'].values
    p = Proj("+proj=lcc +lat_1=33.88333333333333 +lat_2=32.78333333333333 "
             "+lat_0=32.16666666666666 +lon_0=-116.25 +x_0=2000000.0001016 "
             "+y_0=500000.0001016001 +datum=NAD83 +units=us-ft +no_defs ")
    df['lon'], df['lat'] = p(y, x, inverse=True)
    return df

/ i8 ^' U9 `$ I+ U9 J

#修改数据的时间格式

* o- A. S! F, g. _

def reformat_strtime(time_str=None, START_YEAR="2019"):
    """Rewrite a raw 'MMDD HH:MM:SS' string as 'YYYY-MM-DD HH:MM:SS'."""
    parts = time_str.split(" ")
    date_token = parts[0]
    # Insert the (fixed) year and dash-separate month and day.
    date_part = "-".join([START_YEAR, date_token[:2], date_token[2:4]])
    return date_part + " " + parts[1]

8 ?+ W/ f' }5 |& S5 u# D4 B

#计算两个点的距离

- j' K- `7 J( Y: |3 \* s: O6 H# O

def haversine_np(lon1, lat1, lon2, lat2):
    """Great-circle distance in meters between two (lon, lat) points.

    Accepts scalars or numpy arrays (vectorized). Uses an Earth radius
    of 6367 km.
    """
    lon1 = np.radians(lon1)
    lat1 = np.radians(lat1)
    lon2 = np.radians(lon2)
    lat2 = np.radians(lat2)
    half_dlon = (lon2 - lon1) / 2.0
    half_dlat = (lat2 - lat1) / 2.0
    # Haversine formula.
    a = np.sin(half_dlat) ** 2 + np.cos(lat1) * np.cos(lat2) * np.sin(half_dlon) ** 2
    central_angle = 2 * np.arcsin(np.sqrt(a))
    km = 6367 * central_angle
    return km * 1000

5 h6 s# I. p* y, z

def compute_traj_diff_time_distance(traj=None):
    """Compute the sampling interval and hop distance for one trajectory.

    Adds two columns to ``traj``:
    - ``time_array``: minutes between consecutive fixes;
    - ``dist_array``: haversine distance (meters) between consecutive fixes.
    The first row, which has no predecessor, is filled with the column mean.

    NOTE(review): assumes ``traj['time']`` is already a datetime series
    (``.dt`` accessor) — confirm the caller converts it upstream.
    """
    # Minutes between consecutive timestamps.
    time_diff_array = (traj["time"].iloc[1:].reset_index(drop=True) - traj[
        "time"].iloc[:-1].reset_index(drop=True)).dt.total_seconds() / 60
    # Distance between consecutive coordinates.
    dist_diff_array = haversine_np(traj["lon"].values[1:],   # lon_0
                                   traj["lat"].values[1:],   # lat_0
                                   traj["lon"].values[:-1],  # lon_1
                                   traj["lat"].values[:-1]   # lat_1
                                   )
    # Pad the first element with the mean so the arrays align with traj.
    time_diff_array = [time_diff_array.mean()] + time_diff_array.tolist()
    dist_diff_array = [dist_diff_array.mean()] + dist_diff_array.tolist()
    traj.loc[list(traj.index), 'time_array'] = time_diff_array
    traj.loc[list(traj.index), 'dist_array'] = dist_diff_array
    return traj

* _$ e3 ]4 v- g# g( ~8 q4 `

#对轨迹进行异常点的剔除

# X9 k8 S/ `0 C3 {6 S1 [" T

def assign_traj_anomaly_points_nan(traj=None, speed_maximum=23,
                                   time_interval_maximum=200,
                                   coord_speed_maximum=700):
    """Mask or drop anomalous points of one trajectory.

    Step 1 sets out-of-range speeds (``< 0`` or ``> speed_maximum``) to NaN.
    Step 2 drops rows whose sampling interval or coordinate speed fails a
    3-sigma test, then drops rows whose lon/lat fall outside mean ± 3*std.

    Parameters ``time_interval_maximum`` and ``coord_speed_maximum`` are
    currently unused; they are kept for interface compatibility.

    Returns
    -------
    (traj, [n_removed]) : the cleaned trajectory and a one-element list with
    the number of rows removed.
    """

    def thigma_data(data_y, n):
        # 3-sigma style flags: True where the value is outside mean ± n*std.
        ymean = np.mean(data_y)
        ystd = np.std(data_y)
        threshold1 = ymean - n * ystd
        threshold2 = ymean + n * ystd
        judge = []
        for data in data_y:
            if (data < threshold1) | (data > threshold2):
                judge.append(True)
            else:
                judge.append(False)
        return judge

    # Step 1: The speed anomaly repairing.
    is_speed_anomaly = (traj["speed"] > speed_maximum) | (traj["speed"] < 0)
    # Use .loc instead of chained indexing so the assignment reliably hits
    # the frame (chained traj["speed"][mask] = ... may write to a copy).
    traj.loc[is_speed_anomaly, "speed"] = np.nan

    # Step 2: derive a coordinate speed from distance and time.
    is_anomaly = np.array([False] * len(traj))
    traj["coord_speed"] = traj["dist_array"] / traj["time_array"]

    # Condition 1: 3-sigma filter on sampling interval and coordinate speed.
    is_anomaly_tmp = pd.Series(thigma_data(traj["time_array"], 3)) | pd.Series(thigma_data(traj["coord_speed"], 3))
    is_anomaly = is_anomaly | is_anomaly_tmp
    is_anomaly.index = traj.index
    traj = traj[~is_anomaly].reset_index(drop=True)

    # Condition 2: 3-sigma filter on the coordinates themselves.
    is_anomaly = np.array([False] * len(traj))
    if len(traj) != 0:
        lon_std, lon_mean = traj["lon"].std(), traj["lon"].mean()
        lat_std, lat_mean = traj["lat"].std(), traj["lat"].mean()
        lon_low, lon_high = lon_mean - 3 * lon_std, lon_mean + 3 * lon_std
        lat_low, lat_high = lat_mean - 3 * lat_std, lat_mean + 3 * lat_std
        is_anomaly = is_anomaly | (traj["lon"] > lon_high) | ((traj["lon"] < lon_low))
        is_anomaly = is_anomaly | (traj["lat"] > lat_high) | ((traj["lat"] < lat_low))
        traj = traj[~is_anomaly].reset_index(drop=True)
    return traj, [len(is_speed_anomaly) - len(traj)]

4 s) s! i/ q/ Z2 z# n) \

# Load the raw training set (one csv per ship, plane coordinates).
df = get_data(r'C:\Users\admin\hy_round1_train_20200102', 'train')

# Remove anomaly points per trajectory, then linearly interpolate the NaNs.
ID_list = list(pd.DataFrame(df['ID'].value_counts()).index)
DF_NEW = []
Anomaly_count = []
for ID in tqdm(ID_list):
    df_id = compute_traj_diff_time_distance(df[df['ID'] == ID])
    df_new, count = assign_traj_anomaly_points_nan(df_id)
    df_new["speed"] = df_new["speed"].interpolate(method="linear", axis=0)
    df_new = df_new.fillna(method="bfill")
    df_new = df_new.fillna(method="ffill")
    df_new["speed"] = df_new["speed"].clip(0, 23)
    Anomaly_count.append(count)  # number of anomaly points removed for this ship id
    DF_NEW.append(df_new)

# Persist the cleaned per-ship frames as a pickle.
# NOTE(review): Load_Save_Data is not defined anywhere in this file —
# presumably a helper class from the course repo; confirm it is imported.
load_save = Load_Save_Data()
load_save.save_data(DF_NEW, "C:/Users/admin/wisdomOcean/data_tmp1/total_data.pkl")

$ v0 w7 ~# F. ` A- s) S* y

#### 三类渔船速度和方向可视化

2 c2 y* a- i I6 T* T

# 把训练集的所有数据,根据类别存放到不同的数据文件中

% q( C3 N7 V3 R( W/ e) D5 Q/ T+ `

# Split the cleaned training data into one pickle file per fishing-gear type.
def get_diff_data():
    """Group the pickled trajectories by gear type and save each group to its own file."""
    import pickle  # local import: pickle is never imported at the top of this file
    Path = "C:/Users/admin/wisdomOcean/data_tmp1/total_data.pkl"
    with open(Path, "rb") as f:
        total_data = pickle.load(f)
    # NOTE(review): Load_Save_Data is not defined in this file — confirm import.
    load_save = Load_Save_Data()
    kind_data = ["刺网", "围网", "拖网"]
    file_names = ["ciwang_data.pkl", "weiwang_data.pkl", "tuowang_data.pkl"]
    for i, datax in enumerate(kind_data):
        # Keep the ships whose (single) type label matches this gear type.
        data_type = [data for data in total_data if data["type"].unique()[0] == datax]
        load_save.save_data(data_type, "C:/Users/admin/wisdomOcean/data_tmp1/" + file_names[i])

get_diff_data()

# m3 u( U8 Z) v! h

# Remove anomaly points per trajectory, then linearly interpolate the NaNs.
# NOTE(review): this loop duplicates the cleaning pass performed earlier in
# the file — presumably a leftover notebook cell; confirm it is needed twice.
ID_list = list(pd.DataFrame(df['ID'].value_counts()).index)
DF_NEW = []
Anomaly_count = []
for ID in tqdm(ID_list):
    df_id = compute_traj_diff_time_distance(df[df['ID'] == ID])
    df_new, count = assign_traj_anomaly_points_nan(df_id)
    df_new["speed"] = df_new["speed"].interpolate(method="linear", axis=0)
    df_new = df_new.fillna(method="bfill")
    df_new = df_new.fillna(method="ffill")
    df_new["speed"] = df_new["speed"].clip(0, 23)
    Anomaly_count.append(count)  # number of anomaly points removed for this ship id
    DF_NEW.append(df_new)

' {/ Q3 g2 w8 b% W) b* ]8 H

# 每类轨迹,随机选取某个渔船,可视化速度序列和方向序列

- @4 U* C/ B+ Y6 w1 }7 p2 b' w

# For each gear type, pick one random ship and plot its speed and direction series.
def visualize_three_traj_speed_direction():
    """Plot speed (left column) and direction (right column) for one random
    trajectory of each of the three gear types."""
    fig, axes = plt.subplots(nrows=3, ncols=2, figsize=(20, 15))
    plt.subplots_adjust(wspace=0.3, hspace=0.3)
    # One randomly chosen trajectory per gear type.
    file_types = ["ciwang_data", "weiwang_data", "tuowang_data"]
    speed_types = ["ciwang_speed", "weiwang_speed", "tuowang_speed"]
    doirections = ["ciwang_direction", "weiwang_direction", "tuowang_direction"]
    colors = ['pink', 'lightblue', 'lightgreen']
    for i, file_name in tqdm(enumerate(file_types)):
        # NOTE(review): get_random_one_traj is not defined in this file —
        # presumably a helper from the course repo; confirm it is imported.
        datax = get_random_one_traj(type=file_name)
        # NOTE(review): .loc[-1:] on the default RangeIndex selects every row —
        # presumably intended as "all rows"; verify against the data's index.
        x_data = datax["速度"].loc[-1:].values
        y_data = datax["方向"].loc[-1:].values
        axes[i][0].plot(range(len(x_data)), x_data, label=speed_types[i], color=colors[i])
        axes[i][0].grid(alpha=2)
        axes[i][0].legend(loc="best")
        axes[i][1].plot(range(len(y_data)), y_data, label=doirections[i], color=colors[i])
        axes[i][1].grid(alpha=2)
        axes[i][1].legend(loc="best")
    plt.show()

visualize_three_traj_speed_direction()

) G2 {4 g1 I, n! ?' j( w
2 S, A5 N, J, Y1 u/ X

作业二:相关性分析。

/ V/ E0 _' d2 N

# Map the Chinese gear-type labels to numeric ids so they can enter the
# correlation matrix.
data_train.loc[data_train['type'] == '刺网', 'type_id'] = 1
data_train.loc[data_train['type'] == '围网', 'type_id'] = 2
data_train.loc[data_train['type'] == '拖网', 'type_id'] = 3

# Heatmap of absolute pairwise correlations.
# NOTE(review): sns is never imported in this file — confirm
# `import seaborn as sns` is present where this runs.
f, ax = plt.subplots(figsize=(9, 6))
ax = sns.heatmap(np.abs(df.corr()), annot=True)
plt.show()

, Y0 _% D4 l$ h* s" N9 J; a ! R! g( D0 h# X3 \

从图中可以清楚看到,经纬度和速度跟类型相关性比较大。

( u& S( V* Y3 k) y. g9 w3 N" r) m5 f3 {3 K, P: x _' @ [8 s7 T5 [/ i+ ]2 y) j , w3 k8 P7 B8 T5 |8 e U$ p' D( g% n; `8 j# Z$ y6 D, M0 `
回复

举报 使用道具

全部回帖
暂无回帖,快来参与回复吧
懒得打字?点击右侧快捷回复 【吾爱海洋论坛发文有奖】
您需要登录后才可以回帖 登录 | 立即注册
陌羡尘
活跃在3 天前
快速回复 返回顶部 返回列表