This section is the data analysis module of the Smart Ocean (智慧海洋建设) competition. Working through the data analysis here familiarizes us with the dataset and prepares the ground for the feature engineering that follows; questions and discussion are welcome. Competition task: 智慧海洋建设 (Smart Ocean Construction).
Purpose of the data analysis:
The main value of EDA lies in getting familiar with the basic condition of the dataset (missing values, outliers) to confirm that it can be used for the machine learning or deep learning work that follows, in understanding the correlations and distributions of the features and their relationship to the target, and in providing a rationale for the feature engineering.
Project repository: https://github.com/datawhalechina/team-learning-data-mining/tree/master/wisdomOcean
Competition page: https://tianchi.aliyun.com/competition/entrance/231768/introduction?spm=5176.12281957.1004.8.4ac63eafE1rwsY
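As a minimal sketch of this first pass over the data (assuming the records have already been loaded into a single DataFrame df with the columns produced by the get_data helper defined below):
df.shape            # dataset size (rows, columns)
df.info()           # column dtypes and non-null counts
df.describe()       # basic statistics of the numeric features
df.isnull().sum()   # missing values per column
df.nunique()        # number of unique values per column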
2.1 Learning goals
- Learn how to analyse the overall profile of the dataset, including its basic condition (missing values, outliers).
- Learn about the relationships between variables and between each variable and the target.
- Complete the corresponding study check-in task.
2.2 Content overview
- Overall understanding of the data: read the dataset and check its size and original feature dimensions; inspect the data types with info; take a rough look at the basic statistics of each feature.
- Missing and unique values: check for missing values; check the unique values.
- Data characteristics and feature distributions: visualisation of the trajectories (coordinate sequences) of the three types of fishing vessels; visualisation of their speed and direction sequences; distribution of speed and direction for the three types.
Assignment 1: plot the trajectories after removing anomalous points.
import pandas as pd
import geopandas as gpd
from pyproj import Proj
from keplergl import KeplerGl
from tqdm import tqdm
import os
import pickle  # needed later to read back the .pkl files written below
import matplotlib.pyplot as plt
import seaborn as sns  # needed for the correlation heatmap in Assignment 2
import shapely
import numpy as np
from datetime import datetime
import warnings
warnings.filterwarnings("ignore")
plt.rcParams["font.sans-serif"] = ["SimSun"]  # set the default font so Chinese labels render correctly
plt.rcParams["axes.unicode_minus"] = False    # keep the minus sign from being drawn as a box when figures are saved
# Read every per-vessel csv file in the folder and stack them into one DataFrame
def get_data(file_path, model):
    assert model in ["train", "test"], "{} Not Support this type of file".format(model)
    paths = os.listdir(file_path)
    # print(len(paths))
    tmp = []
    for t in tqdm(range(len(paths))):
        p = paths[t]
        with open("{}/{}".format(file_path, p), encoding="utf-8") as f:
            next(f)  # skip the header line
            for line in f.readlines():
                tmp.append(line.strip().split(","))
    tmp_df = pd.DataFrame(tmp)
    if model == "train":
        tmp_df.columns = ["ID", "lat", "lon", "speed", "direction", "time", "type"]
    else:
        tmp_df["type"] = "unknown"
        tmp_df.columns = ["ID", "lat", "lon", "speed", "direction", "time", "type"]
    tmp_df["lat"] = tmp_df["lat"].astype(float)
    tmp_df["lon"] = tmp_df["lon"].astype(float)
    tmp_df["speed"] = tmp_df["speed"].astype(float)
    tmp_df["direction"] = tmp_df["direction"].astype(int)  # if this line fails, try upgrading pandas
    return tmp_df
# Convert plane coordinates to longitude/latitude (for the preliminary-round data)
# Chosen CRS: NAD83 / California zone 6 (ftUS) (EPSG:2230); look-up: CS2CS - Transform Coordinates On-line - MyGeodata Cloud
def transform_xy2lonlat(df):
    x = df["lat"].values
    y = df["lon"].values
    p = Proj("+proj=lcc +lat_1=33.88333333333333 +lat_2=32.78333333333333 +lat_0=32.16666666666666 +lon_0=-116.25 +x_0=2000000.0001016 +y_0=500000.0001016001 +datum=NAD83 +units=us-ft +no_defs")
    df["lon"], df["lat"] = p(y, x, inverse=True)
    return df
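Recent pyproj releases favour the Transformer API over calling a Proj object with inverse=True. A sketch of the same conversion under that API, assuming the proj string above really does correspond to EPSG:2230 as the comment states (an alternative, not the original code):
from pyproj import Transformer
# EPSG:2230 (NAD83 / California zone 6, ftUS) -> EPSG:4326 (WGS84); always_xy keeps (x, y) / (lon, lat) ordering
_transformer = Transformer.from_crs("EPSG:2230", "EPSG:4326", always_xy=True)
def transform_xy2lonlat_v2(df):
    # mirror transform_xy2lonlat above: projected x is stored in the "lon" column, projected y in "lat"
    lon, lat = _transformer.transform(df["lon"].values, df["lat"].values)
    df["lon"], df["lat"] = lon, lat
    return df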
# Reformat the time strings in the raw data
def reformat_strtime(time_str=None, START_YEAR="2019"):
    """Reformat a time string of the form "0814 HH:MM:SS" to "START_YEAR-08-14 HH:MM:SS"."""
    time_str_split = time_str.split(" ")
    time_str_reformat = START_YEAR + "-" + time_str_split[0][:2] + "-" + time_str_split[0][2:4]
    time_str_reformat = time_str_reformat + " " + time_str_split[1]
    # time_reformat = datetime.strptime(time_str_reformat, "%Y-%m-%d %H:%M:%S")
    return time_str_reformat
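For example, a raw timestamp of the form "0814 10:23:57" (a made-up value in that format) is reformatted as follows:
reformat_strtime("0814 10:23:57")  # -> "2019-08-14 10:23:57"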
# Great-circle distance between two points (haversine formula), returned in metres
def haversine_np(lon1, lat1, lon2, lat2):
    lon1, lat1, lon2, lat2 = map(np.radians, [lon1, lat1, lon2, lat2])
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    a = np.sin(dlat / 2.0) ** 2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon / 2.0) ** 2
    c = 2 * np.arcsin(np.sqrt(a))
    km = 6367 * c  # Earth radius of 6367 km
    return km * 1000
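As a quick sanity check (an illustrative example, not part of the original notebook): two points one degree of latitude apart on the same meridian should be roughly 111 km apart:
haversine_np(120.0, 30.0, 120.0, 31.0)  # ≈ 111,000 m with the 6367 km radius used above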
def compute_traj_diff_time_distance(traj=None):
    """Compute the sampling time interval and the coordinate distance between consecutive points."""
    # time difference (in minutes) between consecutive points
    time_diff_array = (traj["time"].iloc[1:].reset_index(drop=True) -
                       traj["time"].iloc[:-1].reset_index(drop=True)).dt.total_seconds() / 60
    # distance (in metres) between consecutive coordinates
    dist_diff_array = haversine_np(traj["lon"].values[1:],   # lon_0
                                   traj["lat"].values[1:],   # lat_0
                                   traj["lon"].values[:-1],  # lon_1
                                   traj["lat"].values[:-1]   # lat_1
                                   )
    # pad the first value with the mean so the arrays match the trajectory length
    time_diff_array = [time_diff_array.mean()] + time_diff_array.tolist()
    dist_diff_array = [dist_diff_array.mean()] + dist_diff_array.tolist()
    traj.loc[list(traj.index), "time_array"] = time_diff_array
    traj.loc[list(traj.index), "dist_array"] = dist_diff_array
    return traj
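Note that the .dt accessor above requires traj["time"] to already be a datetime column; the raw strings produced by get_data can be converted beforehand, for example with the reformat_strtime helper (a hedged sketch, since the exact raw time format depends on the competition files):
df["time"] = pd.to_datetime(df["time"].apply(reformat_strtime), format="%Y-%m-%d %H:%M:%S")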
# Remove anomalous points from a trajectory
def assign_traj_anomaly_points_nan(traj=None, speed_maximum=23,
                                   time_interval_maximum=200,
                                   coord_speed_maximum=700):
    """Assign the anomaly points in traj to np.nan."""
    def thigma_data(data_y, n):
        # 3-sigma rule: flag points more than n standard deviations away from the mean
        ymean = np.mean(data_y)
        ystd = np.std(data_y)
        threshold1 = ymean - n * ystd
        threshold2 = ymean + n * ystd
        judge = []
        for data in data_y:
            if (data < threshold1) | (data > threshold2):
                judge.append(True)
            else:
                judge.append(False)
        return judge

    # Step 1: speed anomaly repairing
    is_speed_anomaly = (traj["speed"] > speed_maximum) | (traj["speed"] < 0)
    traj["speed"][is_speed_anomaly] = np.nan

    # Step 2: compute the speed implied by distance and time
    is_anomaly = np.array([False] * len(traj))
    traj["coord_speed"] = traj["dist_array"] / traj["time_array"]

    # Condition 1: drop points whose coord_speed or time gap is a 3-sigma outlier
    is_anomaly_tmp = pd.Series(thigma_data(traj["time_array"], 3)) | pd.Series(thigma_data(traj["coord_speed"], 3))
    is_anomaly = is_anomaly | is_anomaly_tmp
    is_anomaly.index = traj.index

    # Condition 2: 3-sigma handling of the coordinates themselves
    traj = traj[~is_anomaly].reset_index(drop=True)
    is_anomaly = np.array([False] * len(traj))
    if len(traj) != 0:
        lon_std, lon_mean = traj["lon"].std(), traj["lon"].mean()
        lat_std, lat_mean = traj["lat"].std(), traj["lat"].mean()
        lon_low, lon_high = lon_mean - 3 * lon_std, lon_mean + 3 * lon_std
        lat_low, lat_high = lat_mean - 3 * lat_std, lat_mean + 3 * lat_std
        is_anomaly = is_anomaly | (traj["lon"] > lon_high) | (traj["lon"] < lon_low)
        is_anomaly = is_anomaly | (traj["lat"] > lat_high) | (traj["lat"] < lat_low)
        traj = traj[~is_anomaly].reset_index(drop=True)
    return traj, [len(is_speed_anomaly) - len(traj)]
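The saving step below (and get_diff_data further down) uses a helper class Load_Save_Data that is not defined in this section. A minimal pickle-based sketch that is consistent with how it is called here, offered as an assumption rather than the course's actual implementation:
class Load_Save_Data():
    """Minimal pickle-based stand-in for the course helper (assumed interface)."""
    def save_data(self, data, file_name):
        # write any Python object (here: a list of DataFrames) to a .pkl file
        with open(file_name, "wb") as f:
            pickle.dump(data, f)
    def load_data(self, file_name):
        # read the object back
        with open(file_name, "rb") as f:
            return pickle.load(f)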
df = get_data(r"C:\Users\admin\hy_round1_train_20200102", "train")
# Remove anomalous points from each trajectory and linearly interpolate the resulting NaNs
ID_list = list(pd.DataFrame(df["ID"].value_counts()).index)
DF_NEW = []
Anomaly_count = []
for ID in tqdm(ID_list):
    df_id = compute_traj_diff_time_distance(df[df["ID"] == ID])
    df_new, count = assign_traj_anomaly_points_nan(df_id)
    df_new["speed"] = df_new["speed"].interpolate(method="linear", axis=0)
    df_new = df_new.fillna(method="bfill")
    df_new = df_new.fillna(method="ffill")
    df_new["speed"] = df_new["speed"].clip(0, 23)
    Anomaly_count.append(count)  # number of anomalous points removed for this ID
    DF_NEW.append(df_new)
# Write the cleaned data to a pkl file
load_save = Load_Save_Data()
load_save.save_data(DF_NEW, "C:/Users/admin/wisdomOcean/data_tmp1/total_data.pkl")
#### Visualising the speed and direction of the three types of fishing vessels
# Split the training data into separate files by vessel type
def get_diff_data():
    Path = "C:/Users/admin/wisdomOcean/data_tmp1/total_data.pkl"
    with open(Path, "rb") as f:
        total_data = pickle.load(f)
    load_save = Load_Save_Data()
    kind_data = ["刺网", "围网", "拖网"]  # gillnet, purse seine, trawl
    file_names = ["ciwang_data.pkl", "weiwang_data.pkl", "tuowang_data.pkl"]
    for i, datax in enumerate(kind_data):
        data_type = [data for data in total_data if data["type"].unique()[0] == datax]
        load_save.save_data(data_type, "C:/Users/admin/wisdomOcean/data_tmp1/" + file_names[i])
get_diff_data()
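The visualisation below relies on get_random_one_traj, another helper that is not defined in this section; judging from how it is called, it returns one randomly chosen trajectory DataFrame of the requested type. A sketch under that assumption, reading the per-type files written by get_diff_data (the path and file naming follow the code above):
import random
def get_random_one_traj(type=None):
    # assumed behaviour: load the per-type pickle and return one random trajectory from it
    path = "C:/Users/admin/wisdomOcean/data_tmp1/" + type + ".pkl"
    with open(path, "rb") as f:
        data_list = pickle.load(f)
    return random.choice(data_list)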
# For each class of trajectory, pick one vessel at random and plot its speed and direction sequences
def visualize_three_traj_speed_direction():
    fig, axes = plt.subplots(nrows=3, ncols=2, figsize=(20, 15))
    plt.subplots_adjust(wspace=0.3, hspace=0.3)
    # pick one random trajectory of each type for visualisation
    file_types = ["ciwang_data", "weiwang_data", "tuowang_data"]
    speed_types = ["ciwang_speed", "weiwang_speed", "tuowang_speed"]
    doirections = ["ciwang_direction", "weiwang_direction", "tuowang_direction"]
    colors = ["pink", "lightblue", "lightgreen"]
    for i, file_name in tqdm(enumerate(file_types)):
        datax = get_random_one_traj(type=file_name)
        x_data = datax["speed"].loc[-1:].values      # speed sequence
        y_data = datax["direction"].loc[-1:].values  # direction sequence
        axes[i][0].plot(range(len(x_data)), x_data, label=speed_types[i], color=colors[i])
        axes[i][0].grid(alpha=0.5)  # alpha must lie in [0, 1]
        axes[i][0].legend(loc="best")
        axes[i][1].plot(range(len(y_data)), y_data, label=doirections[i], color=colors[i])
        axes[i][1].grid(alpha=0.5)
        axes[i][1].legend(loc="best")
    plt.show()
visualize_three_traj_speed_direction()
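The content overview also mentions visualising the coordinate sequences of the three vessel types. That plot is not included in this section; a minimal sketch of what it could look like for a single randomly chosen trajectory (an illustrative addition, reusing the get_random_one_traj sketch above):
def visualize_one_traj_coords(datax, color="steelblue"):
    # scatter the lon/lat sequence of one trajectory
    plt.figure(figsize=(6, 6))
    plt.scatter(datax["lon"], datax["lat"], s=2, color=color)
    plt.xlabel("lon")
    plt.ylabel("lat")
    plt.show()
visualize_one_traj_coords(get_random_one_traj(type="tuowang_data"))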
Assignment 2: correlation analysis.
# Map the vessel type to a numeric id on the training DataFrame (df loaded above; the original snippet used a data_train variable)
df.loc[df["type"] == "刺网", "type_id"] = 1
df.loc[df["type"] == "围网", "type_id"] = 2
df.loc[df["type"] == "拖网", "type_id"] = 3
f, ax = plt.subplots(figsize=(9, 6))
# non-numeric columns (ID, time, type) are ignored by corr(); on recent pandas you may need corr(numeric_only=True)
ax = sns.heatmap(np.abs(df.corr()), annot=True)
plt.show()
The heatmap shows that latitude, longitude and speed have a relatively strong correlation with the vessel type.
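To read the same ranking off numerically rather than visually, one can inspect the type_id column of the correlation matrix (a small optional sketch):
corr_with_type = np.abs(df.corr()["type_id"]).drop("type_id").sort_values(ascending=False)
print(corr_with_type)  # lat, lon and speed are expected near the top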