This section is the data analysis module of the 智慧海洋建设 (Smart Ocean) competition tutorial. Through exploratory analysis we become familiar with the data and prepare for the feature engineering that follows; discussion and exchange are welcome. Competition topic: 智慧海洋建设 (Smart Ocean).
Purpose of the data analysis:
The main value of EDA is to become familiar with the basic condition of the dataset (missing values, outliers) and to confirm that the data can be used for the machine learning or deep learning that follows; to understand the correlations and distributions of the features, as well as the relationship between the features and the target; and to provide a basis for feature engineering.
Project: https://github.com/datawhalechina/team-learning-data-mining/tree/master/wisdomOcean
Competition: https://tianchi.aliyun.com/competition/entrance/231768/introduction?spm=5176.12281957.1004.8.4ac63eafE1rwsY
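As a concrete starting point, the overall inspection described above comes down to a few standard pandas calls. The snippet below is only a generic sketch: the file path is a placeholder, and df stands for whatever DataFrame the raw records are read into (see get_data further down).

import pandas as pd

df = pd.read_csv("some_raw_file.csv")  # placeholder path, for illustration only
print(df.shape)                        # dataset size and raw feature dimensions
df.info()                              # data types and non-null counts per column
print(df.describe())                   # rough look at the basic statistics of each feature
print(df.isnull().sum())               # number of missing values per column
print(df.nunique())                    # number of unique values per column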
2.1 Learning objectives
Learn how to analyse the overall profile of a dataset, including its basic condition (missing values, outliers); learn about the relationships between variables and between the variables and the target; complete the corresponding check-in task.

2.2 Content overview
Overall view of the data: read the dataset and check its size and raw feature dimensions; inspect the data types with info(); take a rough look at the basic statistics of each feature.
Missing values and unique values: check for missing values; check the unique values.
Data characteristics and feature distributions:
Visualization of the trajectories of the three fishing-vessel classes; visualization of the coordinate sequences; visualization of the speed and direction sequences of the three classes; distribution of speed and direction for the three classes.

Assignment 1: plot the trajectories after removing anomalous points.

import pandas as pd
import geopandas as gpd
from pyproj import Proj
from keplergl import KeplerGl
from tqdm import tqdm
import os
import pickle
import matplotlib.pyplot as plt
import seaborn as sns
import shapely
import numpy as np
from datetime import datetime
import warnings
warnings.filterwarnings("ignore")

plt.rcParams["font.sans-serif"] = ["SimSun"]  # use SimSun as the default font
plt.rcParams["axes.unicode_minus"] = False    # keep minus signs from being rendered as boxes when saving figures
# Read all files in a folder into a single DataFrame
def get_data(file_path, model):
    assert model in ["train", "test"], "{} Not Support this type of file".format(model)
    paths = os.listdir(file_path)
    # print(len(paths))
    tmp = []
    for t in tqdm(range(len(paths))):
        p = paths[t]
        with open("{}/{}".format(file_path, p), encoding="utf-8") as f:
            next(f)
            for line in f.readlines():
                tmp.append(line.strip().split(","))
    tmp_df = pd.DataFrame(tmp)
    if model == "train":
        tmp_df.columns = ["ID", "lat", "lon", "speed", "direction", "time", "type"]
    else:
        tmp_df["type"] = "unknown"
        tmp_df.columns = ["ID", "lat", "lon", "speed", "direction", "time", "type"]
    tmp_df["lat"] = tmp_df["lat"].astype(float)
    tmp_df["lon"] = tmp_df["lon"].astype(float)
    tmp_df["speed"] = tmp_df["speed"].astype(float)
    tmp_df["direction"] = tmp_df["direction"].astype(int)  # if this line fails, try upgrading pandas
    return tmp_df
# Convert planar coordinates to longitude/latitude (used for the preliminary-round data)
# The chosen CRS is NAD83 / California zone 6 (ftUS) (EPSG:2230); lookup: CS2CS - Transform Coordinates On-line - MyGeodata Cloud
def transform_xy2lonlat(df):
    x = df["lat"].values
    y = df["lon"].values
    p = Proj("+proj=lcc +lat_1=33.88333333333333 +lat_2=32.78333333333333 +lat_0=32.16666666666666 +lon_0=-116.25 +x_0=2000000.0001016 +y_0=500000.0001016001 +datum=NAD83 +units=us-ft +no_defs")
    df["lon"], df["lat"] = p(y, x, inverse=True)
    return df
# Reformat the time strings in the data
def reformat_strtime(time_str=None, START_YEAR="2019"):
    """Reformat a time string of the form "0814 HH:MM:SS" to "START_YEAR-08-14 HH:MM:SS"."""
    time_str_split = time_str.split(" ")
    time_str_reformat = START_YEAR + "-" + time_str_split[0][:2] + "-" + time_str_split[0][2:4]
    time_str_reformat = time_str_reformat + " " + time_str_split[1]
    # time_reformat = datetime.strptime(time_str_reformat, "%Y-%m-%d %H:%M:%S")
    return time_str_reformat
# Great-circle distance between two points (haversine formula), returned in meters
def haversine_np(lon1, lat1, lon2, lat2):
    lon1, lat1, lon2, lat2 = map(np.radians, [lon1, lat1, lon2, lat2])
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    a = np.sin(dlat/2.0)**2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon/2.0)**2
    c = 2 * np.arcsin(np.sqrt(a))
    km = 6367 * c
    return km * 1000
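For reference, the function above implements the standard haversine formula, a = sin²(Δlat/2) + cos(lat1)·cos(lat2)·sin²(Δlon/2), d = 2R·arcsin(√a), with R taken as 6367 km and the result returned in meters. A quick illustrative call (the coordinates below are arbitrary sample values, not competition data):

# one degree of longitude at latitude 30°N is roughly 96 km
print(haversine_np(120.0, 30.0, 121.0, 30.0))  # ≈ 96000 (meters)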
# Sampling interval and distance between consecutive trajectory points
def compute_traj_diff_time_distance(traj=None):
    """Compute the sampling time and the coordinate distance."""
    # time difference (in minutes) between consecutive records; traj["time"] must already be datetime64
    time_diff_array = (traj["time"].iloc[1:].reset_index(drop=True) - traj[
        "time"].iloc[:-1].reset_index(drop=True)).dt.total_seconds() / 60
    # distance between consecutive coordinates
    dist_diff_array = haversine_np(traj["lon"].values[1:],   # lon_0
                                   traj["lat"].values[1:],   # lat_0
                                   traj["lon"].values[:-1],  # lon_1
                                   traj["lat"].values[:-1]   # lat_1
                                   )
    # pad the first value with the mean
    time_diff_array = [time_diff_array.mean()] + time_diff_array.tolist()
    dist_diff_array = [dist_diff_array.mean()] + dist_diff_array.tolist()
    traj.loc[list(traj.index), "time_array"] = time_diff_array
    traj.loc[list(traj.index), "dist_array"] = dist_diff_array
    return traj
# Remove anomalous points from a trajectory
def assign_traj_anomaly_points_nan(traj=None, speed_maximum=23,
                                   time_interval_maximum=200,
                                   coord_speed_maximum=700):
    """Assign the anomaly points in traj to np.nan."""
    def thigma_data(data_y, n):
        # 3-sigma rule: flag points more than n standard deviations from the mean
        ymean = np.mean(data_y)
        ystd = np.std(data_y)
        threshold1 = ymean - n * ystd
        threshold2 = ymean + n * ystd
        judge = []
        for data in data_y:
            if (data < threshold1) | (data > threshold2):
                judge.append(True)
            else:
                judge.append(False)
        return judge

    # Step 1: speed anomaly repairing
    is_speed_anomaly = (traj["speed"] > speed_maximum) | (traj["speed"] < 0)
    traj["speed"][is_speed_anomaly] = np.nan

    # Step 2: speed derived from distance and time
    is_anomaly = np.array([False] * len(traj))
    traj["coord_speed"] = traj["dist_array"] / traj["time_array"]

    # Condition 1: drop points with anomalous coord_speed or overly large time intervals (3-sigma rule)
    is_anomaly_tmp = pd.Series(thigma_data(traj["time_array"], 3)) | pd.Series(thigma_data(traj["coord_speed"], 3))
    is_anomaly = is_anomaly | is_anomaly_tmp
    is_anomaly.index = traj.index

    # Condition 2: 3-sigma handling of the trajectory coordinates
    traj = traj[~is_anomaly].reset_index(drop=True)
    is_anomaly = np.array([False] * len(traj))
    if len(traj) != 0:
        lon_std, lon_mean = traj["lon"].std(), traj["lon"].mean()
        lat_std, lat_mean = traj["lat"].std(), traj["lat"].mean()
        lon_low, lon_high = lon_mean - 3 * lon_std, lon_mean + 3 * lon_std
        lat_low, lat_high = lat_mean - 3 * lat_std, lat_mean + 3 * lat_std
        is_anomaly = is_anomaly | (traj["lon"] > lon_high) | (traj["lon"] < lon_low)
        is_anomaly = is_anomaly | (traj["lat"] > lat_high) | (traj["lat"] < lat_low)
        traj = traj[~is_anomaly].reset_index(drop=True)
    return traj, [len(is_speed_anomaly) - len(traj)]
df = get_data(r"C:\Users\admin\hy_round1_train_20200102", "train")
# convert the time column to datetime so that per-point time differences can be computed below
df["time"] = df["time"].apply(reformat_strtime)
df["time"] = pd.to_datetime(df["time"])
# Remove anomalous points from each trajectory and linearly interpolate the NaN values
ID_list = list(pd.DataFrame(df["ID"].value_counts()).index)
DF_NEW = []
Anomaly_count = []
for ID in tqdm(ID_list):
    df_id = compute_traj_diff_time_distance(df[df["ID"] == ID])
    df_new, count = assign_traj_anomaly_points_nan(df_id)
    df_new["speed"] = df_new["speed"].interpolate(method="linear", axis=0)
    df_new = df_new.fillna(method="bfill")
    df_new = df_new.fillna(method="ffill")
    df_new["speed"] = df_new["speed"].clip(0, 23)
    Anomaly_count.append(count)  # number of anomalous points for each ID
    DF_NEW.append(df_new)
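The save step below (and get_diff_data further down) relies on a Load_Save_Data helper that is not defined in this excerpt; it comes from the tutorial's utility code. A minimal sketch of such a helper, assuming a simple pickle-based interface inferred from how it is called here:

import pickle

class Load_Save_Data():
    # assumed pickle-based helper; the tutorial's actual utility class may differ
    def __init__(self, file_name=None):
        self.file_name = file_name

    def save_data(self, data, file_name):
        # dump any Python object (here: a list of DataFrames) to a pkl file
        with open(file_name, "wb") as f:
            pickle.dump(data, f)

    def load_data(self, file_name=None):
        # read an object back from a pkl file
        if file_name is None:
            file_name = self.file_name
        with open(file_name, "rb") as f:
            return pickle.load(f)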
# Save the cleaned data in pkl format
load_save = Load_Save_Data()
load_save.save_data(DF_NEW, "C:/Users/admin/wisdomOcean/data_tmp1/total_data.pkl")
#### Speed and direction visualization for the three vessel classes
# Split the training data into separate files according to vessel class
def get_diff_data():
    Path = "C:/Users/admin/wisdomOcean/data_tmp1/total_data.pkl"
    with open(Path, "rb") as f:
        total_data = pickle.load(f)
    load_save = Load_Save_Data()
    kind_data = ["刺网", "围网", "拖网"]  # gillnet, purse seine, trawl
    file_names = ["ciwang_data.pkl", "weiwang_data.pkl", "tuowang_data.pkl"]
    for i, datax in enumerate(kind_data):
        data_type = [data for data in total_data if data["type"].unique()[0] == datax]
        load_save.save_data(data_type, "C:/Users/admin/wisdomOcean/data_tmp1/" + file_names[i])
get_diff_data()
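The plotting function below also calls get_random_one_traj, which is not defined in this excerpt. A minimal sketch of one possible implementation, assuming it loads the per-class pkl file written by get_diff_data and returns one randomly chosen trajectory DataFrame:

import pickle
import random

def get_random_one_traj(type=None):
    # load the list of trajectories for one vessel class and pick one at random
    path = "C:/Users/admin/wisdomOcean/data_tmp1/" + type + ".pkl"
    with open(path, "rb") as f:
        traj_list = pickle.load(f)
    return random.choice(traj_list)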
# For each vessel class, randomly pick one vessel and plot its speed and direction sequences
def visualize_three_traj_speed_direction():
    fig, axes = plt.subplots(nrows=3, ncols=2, figsize=(20, 15))
    plt.subplots_adjust(wspace=0.3, hspace=0.3)
    # pick one random trajectory from each of the three classes
    file_types = ["ciwang_data", "weiwang_data", "tuowang_data"]
    speed_types = ["ciwang_speed", "weiwang_speed", "tuowang_speed"]
    directions = ["ciwang_direction", "weiwang_direction", "tuowang_direction"]
    colors = ["pink", "lightblue", "lightgreen"]
    for i, file_name in tqdm(enumerate(file_types)):
        datax = get_random_one_traj(type=file_name)
        x_data = datax["speed"].loc[-1:].values
        y_data = datax["direction"].loc[-1:].values
        axes[i][0].plot(range(len(x_data)), x_data, label=speed_types[i], color=colors[i])
        axes[i][0].grid(alpha=0.5)
        axes[i][0].legend(loc="best")
        axes[i][1].plot(range(len(y_data)), y_data, label=directions[i], color=colors[i])
        axes[i][1].grid(alpha=0.5)
        axes[i][1].legend(loc="best")
    plt.show()
visualize_three_traj_speed_direction()
Assignment 2: correlation analysis.
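The snippet below uses a data_train DataFrame that is not constructed in this excerpt; it is assumed to hold per-vessel aggregate features together with the type label. One possible way to build such a frame from the cleaned records is sketched here, with the aggregation choices being illustrative assumptions rather than the tutorial's exact features:

# per-vessel mean position, speed and direction, plus the class label
data_train = pd.concat(DF_NEW, ignore_index=True).groupby("ID").agg(
    lat=("lat", "mean"),
    lon=("lon", "mean"),
    speed=("speed", "mean"),
    direction=("direction", "mean"),
    type=("type", "first"),
).reset_index()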
data_train.loc[data_train["type"] == "刺网", "type_id"] = 1
data_train.loc[data_train["type"] == "围网", "type_id"] = 2
data_train.loc[data_train["type"] == "拖网", "type_id"] = 3
f, ax = plt.subplots(figsize=(9, 6))
ax = sns.heatmap(np.abs(data_train.corr()), annot=True)
plt.show()
The heatmap shows clearly that longitude, latitude, and speed are relatively strongly correlated with the vessel type.