mirror of
https://github.com/MickMake/GoSungrow.git
synced 2025-04-11 13:20:28 +02:00
v3.0.0-alpha - fixing tables
This commit is contained in:
parent
3fdd0fb2dd
commit
376751551e
207
.idea/workspace.xml
generated
207
.idea/workspace.xml
generated
@ -5,10 +5,33 @@
|
||||
</component>
|
||||
<component name="ChangeListManager">
|
||||
<list default="true" id="76adadc9-ae71-42a6-82a1-66dbc8ecb14c" name="Changes" comment="">
|
||||
<change afterPath="$PROJECT_DIR$/cmd/cmd_show_meta.go" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/EXAMPLES.md" beforeDir="false" afterPath="$PROJECT_DIR$/EXAMPLES.md" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/README.md" beforeDir="false" afterPath="$PROJECT_DIR$/README.md" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/examples.txt" beforeDir="false" afterPath="$PROJECT_DIR$/examples.txt" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/cmd/cmd_show.go" beforeDir="false" afterPath="$PROJECT_DIR$/cmd/cmd_show.go" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/cmd/cmd_show_device.go" beforeDir="false" afterPath="$PROJECT_DIR$/cmd/cmd_show_device.go" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/cmd/cmd_show_psid.go" beforeDir="false" afterPath="$PROJECT_DIR$/cmd/cmd_show_psid.go" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/docs/data/getPsList.response.json" beforeDir="false" afterPath="$PROJECT_DIR$/docs/data/getPsList.response.json" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/go.mod" beforeDir="false" afterPath="$PROJECT_DIR$/go.mod" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/go.sum" beforeDir="false" afterPath="$PROJECT_DIR$/go.sum" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/iSolarCloud/AppService/exportParamSettingValPDF/data.go" beforeDir="false" afterPath="$PROJECT_DIR$/iSolarCloud/AppService/exportParamSettingValPDF/data.go" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/iSolarCloud/AppService/exportPlantReportPDF/data.go" beforeDir="false" afterPath="$PROJECT_DIR$/iSolarCloud/AppService/exportPlantReportPDF/data.go" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/iSolarCloud/AppService/getApiCallsForAppkeys/data.go" beforeDir="false" afterPath="$PROJECT_DIR$/iSolarCloud/AppService/getApiCallsForAppkeys/data.go" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/iSolarCloud/AppService/getPowerDeviceSetTaskDetailList/data.go" beforeDir="false" afterPath="$PROJECT_DIR$/iSolarCloud/AppService/getPowerDeviceSetTaskDetailList/data.go" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/iSolarCloud/AppService/getPsDataSupplementTaskList/data.go" beforeDir="false" afterPath="$PROJECT_DIR$/iSolarCloud/AppService/getPsDataSupplementTaskList/data.go" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/iSolarCloud/AppService/getUpTimePoint/data.go" beforeDir="false" afterPath="$PROJECT_DIR$/iSolarCloud/AppService/getUpTimePoint/data.go" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/iSolarCloud/AppService/queryParamSettingTask/data.go" beforeDir="false" afterPath="$PROJECT_DIR$/iSolarCloud/AppService/queryParamSettingTask/data.go" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/iSolarCloud/WebAppService/getLoadCurveList/data.go" beforeDir="false" afterPath="$PROJECT_DIR$/iSolarCloud/WebAppService/getLoadCurveList/data.go" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/iSolarCloud/api/GoStruct/output/struct_table.go" beforeDir="false" afterPath="$PROJECT_DIR$/iSolarCloud/api/GoStruct/output/struct_table.go" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/iSolarCloud/api/struct_area.go" beforeDir="false" afterPath="$PROJECT_DIR$/iSolarCloud/api/struct_area.go" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/iSolarCloud/api/struct_areas.go" beforeDir="false" afterPath="$PROJECT_DIR$/iSolarCloud/api/struct_areas.go" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/iSolarCloud/api/struct_data.go" beforeDir="false" afterPath="$PROJECT_DIR$/iSolarCloud/api/struct_data.go" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/iSolarCloud/data.go" beforeDir="false" afterPath="$PROJECT_DIR$/iSolarCloud/data.go" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/iSolarCloud/data_request.go" beforeDir="false" afterPath="$PROJECT_DIR$/iSolarCloud/data_request.go" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/iSolarCloud/highlevel_device.go" beforeDir="false" afterPath="$PROJECT_DIR$/iSolarCloud/highlevel_device.go" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/iSolarCloud/highlevel_meta.go" beforeDir="false" afterPath="$PROJECT_DIR$/iSolarCloud/highlevel_meta.go" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/iSolarCloud/highlevel_point.go" beforeDir="false" afterPath="$PROJECT_DIR$/iSolarCloud/highlevel_point.go" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/iSolarCloud/highlevel_ps.go" beforeDir="false" afterPath="$PROJECT_DIR$/iSolarCloud/highlevel_ps.go" afterDir="false" />
|
||||
</list>
|
||||
<option name="SHOW_DIALOG" value="false" />
|
||||
<option name="HIGHLIGHT_CONFLICTS" value="true" />
|
||||
@ -82,7 +105,7 @@
|
||||
<configuration name="GoSungrow" type="GoApplicationRunConfiguration" factoryName="Go Application">
|
||||
<module name="GoSungrow" />
|
||||
<working_directory value="$PROJECT_DIR$" />
|
||||
<parameters value="data md AppService.getPowerDevicePointNames DeviceType:14" />
|
||||
<parameters value="show device points" />
|
||||
<envs>
|
||||
<env name="GOCACHE" value="/Volumes/Media/GoCache" />
|
||||
</envs>
|
||||
@ -186,11 +209,6 @@
|
||||
<line>56</line>
|
||||
<option name="timeStamp" value="3780" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/api/GoStruct/output/struct_table.go</url>
|
||||
<line>68</line>
|
||||
<option name="timeStamp" value="3787" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/AppService/getAllPowerRobotViewInfoByPsId/data.go</url>
|
||||
<line>41</line>
|
||||
@ -373,7 +391,7 @@
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/api/struct_data.go</url>
|
||||
<line>580</line>
|
||||
<line>637</line>
|
||||
<option name="timeStamp" value="7135" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
@ -453,7 +471,7 @@
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/data_request.go</url>
|
||||
<line>508</line>
|
||||
<line>509</line>
|
||||
<option name="timeStamp" value="7380" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
@ -473,7 +491,7 @@
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/data_request.go</url>
|
||||
<line>817</line>
|
||||
<line>818</line>
|
||||
<option name="timeStamp" value="7600" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
@ -496,46 +514,11 @@
|
||||
<line>757</line>
|
||||
<option name="timeStamp" value="7735" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/api/GoStruct/output/struct_table.go</url>
|
||||
<line>108</line>
|
||||
<option name="timeStamp" value="7740" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/data.go</url>
|
||||
<line>585</line>
|
||||
<option name="timeStamp" value="7744" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/data.go</url>
|
||||
<line>674</line>
|
||||
<option name="timeStamp" value="7753" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/data.go</url>
|
||||
<line>673</line>
|
||||
<option name="timeStamp" value="7754" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/data.go</url>
|
||||
<line>637</line>
|
||||
<option name="timeStamp" value="7771" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/data.go</url>
|
||||
<line>594</line>
|
||||
<option name="timeStamp" value="7772" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/WebIscmAppService/queryDeviceListForBackSys/data.go</url>
|
||||
<line>55</line>
|
||||
<option name="timeStamp" value="7821" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/data.go</url>
|
||||
<line>549</line>
|
||||
<option name="timeStamp" value="7883" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/highlevel_point.go</url>
|
||||
<line>547</line>
|
||||
@ -546,21 +529,6 @@
|
||||
<line>525</line>
|
||||
<option name="timeStamp" value="7902" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/data.go</url>
|
||||
<line>791</line>
|
||||
<option name="timeStamp" value="7919" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/data.go</url>
|
||||
<line>782</line>
|
||||
<option name="timeStamp" value="7921" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/data.go</url>
|
||||
<line>777</line>
|
||||
<option name="timeStamp" value="7935" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/api/GoStruct/struct_table.go</url>
|
||||
<line>478</line>
|
||||
@ -573,7 +541,7 @@
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/api/struct_data.go</url>
|
||||
<line>326</line>
|
||||
<line>325</line>
|
||||
<option name="timeStamp" value="7960" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
@ -626,11 +594,6 @@
|
||||
<line>261</line>
|
||||
<option name="timeStamp" value="8015" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/cmd/cmd_show_device.go</url>
|
||||
<line>96</line>
|
||||
<option name="timeStamp" value="8032" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/cmd/cmd_show_device.go</url>
|
||||
<line>101</line>
|
||||
@ -641,41 +604,6 @@
|
||||
<line>61</line>
|
||||
<option name="timeStamp" value="8039" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/data.go</url>
|
||||
<line>795</line>
|
||||
<option name="timeStamp" value="8044" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/data.go</url>
|
||||
<line>165</line>
|
||||
<option name="timeStamp" value="8045" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/data.go</url>
|
||||
<line>179</line>
|
||||
<option name="timeStamp" value="8046" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/data.go</url>
|
||||
<line>247</line>
|
||||
<option name="timeStamp" value="8048" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/data.go</url>
|
||||
<line>319</line>
|
||||
<option name="timeStamp" value="8049" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/data.go</url>
|
||||
<line>369</line>
|
||||
<option name="timeStamp" value="8051" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/data.go</url>
|
||||
<line>372</line>
|
||||
<option name="timeStamp" value="8052" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/api/struct_endpoints.go</url>
|
||||
<line>36</line>
|
||||
@ -686,16 +614,76 @@
|
||||
<line>52</line>
|
||||
<option name="timeStamp" value="8064" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/api/GoStruct/output/struct_output.go</url>
|
||||
<line>204</line>
|
||||
<option name="timeStamp" value="8065" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/api/GoStruct/output/struct_output.go</url>
|
||||
<line>230</line>
|
||||
<option name="timeStamp" value="8066" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/api/struct_areas.go</url>
|
||||
<line>149</line>
|
||||
<option name="timeStamp" value="8067" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/api/struct_areas.go</url>
|
||||
<line>175</line>
|
||||
<option name="timeStamp" value="8072" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/api/struct_data.go</url>
|
||||
<line>485</line>
|
||||
<option name="timeStamp" value="8080" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/highlevel_point.go</url>
|
||||
<line>415</line>
|
||||
<option name="timeStamp" value="8082" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/api/GoStruct/output/struct_table.go</url>
|
||||
<line>482</line>
|
||||
<option name="timeStamp" value="8097" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/tablib/gotabulate/tabulate.go</url>
|
||||
<line>203</line>
|
||||
<option name="timeStamp" value="8125" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/api/struct_data.go</url>
|
||||
<line>591</line>
|
||||
<option name="timeStamp" value="8129" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/api/struct_data.go</url>
|
||||
<line>509</line>
|
||||
<option name="timeStamp" value="8144" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/api/struct_data.go</url>
|
||||
<line>118</line>
|
||||
<option name="timeStamp" value="8152" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/highlevel_device.go</url>
|
||||
<line>149</line>
|
||||
<option name="timeStamp" value="8157" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/highlevel_device.go</url>
|
||||
<line>105</line>
|
||||
<option name="timeStamp" value="8159" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/highlevel_device.go</url>
|
||||
<line>142</line>
|
||||
<option name="timeStamp" value="8160" />
|
||||
</line-breakpoint>
|
||||
<line-breakpoint enabled="true" type="DlvLineBreakpoint">
|
||||
<url>file://$PROJECT_DIR$/iSolarCloud/highlevel_device.go</url>
|
||||
<line>139</line>
|
||||
<option name="timeStamp" value="8165" />
|
||||
</line-breakpoint>
|
||||
</breakpoints>
|
||||
<default-breakpoints>
|
||||
<breakpoint type="DlvErrorBreakpoint" />
|
||||
@ -703,8 +691,9 @@
|
||||
</breakpoint-manager>
|
||||
<watches-manager>
|
||||
<configuration name="GoApplicationRunConfiguration">
|
||||
<watch expression="Child.DataStructure.Endpoint" language="go" />
|
||||
<watch expression="cmds" language="go" />
|
||||
<watch expression="t.tablib" language="go" />
|
||||
<watch expression="d" language="go" />
|
||||
<watch expression="table.tablib" language="go" />
|
||||
</configuration>
|
||||
</watches-manager>
|
||||
</component>
|
||||
|
@ -53,10 +53,11 @@ func (c *CmdShow) AttachCommand(cmd *cobra.Command) *cobra.Command {
|
||||
cmd.AddCommand(c.SelfCmd)
|
||||
c.SelfCmd.Example = cmdHelp.PrintExamples(c.SelfCmd, "")
|
||||
|
||||
c.AttachPsId(c.SelfCmd)
|
||||
c.AttachPs(c.SelfCmd)
|
||||
c.AttachDevice(c.SelfCmd)
|
||||
c.AttachTemplate(c.SelfCmd)
|
||||
c.AttachPoint(c.SelfCmd)
|
||||
c.AttachMeta(c.SelfCmd)
|
||||
}
|
||||
return c.SelfCmd
|
||||
}
|
||||
|
@ -80,7 +80,7 @@ func (c *CmdShow) AttachDevicePoints(cmd *cobra.Command) *cobra.Command {
|
||||
DisableFlagsInUseLine: false,
|
||||
PreRunE: cmds.SunGrowArgs,
|
||||
RunE: c.funcDevicePoints,
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
Args: cobra.MinimumNArgs(0),
|
||||
}
|
||||
cmd.AddCommand(self)
|
||||
self.Example = cmdHelp.PrintExamples(self,
|
||||
|
66
cmd/cmd_show_meta.go
Normal file
66
cmd/cmd_show_meta.go
Normal file
@ -0,0 +1,66 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/MickMake/GoUnify/Only"
|
||||
"github.com/MickMake/GoUnify/cmdHelp"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
|
||||
func (c *CmdShow) AttachMeta(cmd *cobra.Command) *cobra.Command {
|
||||
for range Only.Once {
|
||||
var self = &cobra.Command{
|
||||
Use: "meta",
|
||||
Aliases: []string{},
|
||||
Annotations: map[string]string{"group": "Meta"},
|
||||
Short: fmt.Sprintf("Meta related Sungrow commands."),
|
||||
Long: fmt.Sprintf("Meta related Sungrow commands."),
|
||||
DisableFlagParsing: false,
|
||||
DisableFlagsInUseLine: false,
|
||||
PreRunE: cmds.SunGrowArgs,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
return cmd.Help()
|
||||
},
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
}
|
||||
cmd.AddCommand(self)
|
||||
self.Example = cmdHelp.PrintExamples(self, "")
|
||||
|
||||
c.AttachMetaList(self)
|
||||
// c.AttachMetaTree(self)
|
||||
// c.AttachMetaPoints(self)
|
||||
// c.AttachMetaData(self)
|
||||
// c.AttachMetaGraph(self)
|
||||
}
|
||||
return c.SelfCmd
|
||||
}
|
||||
|
||||
|
||||
func (c *CmdShow) AttachMetaList(cmd *cobra.Command) *cobra.Command {
|
||||
var self = &cobra.Command{
|
||||
Use: "unit-list",
|
||||
Aliases: []string{},
|
||||
Annotations: map[string]string{"group": "Meta"},
|
||||
Short: fmt.Sprintf("Show all unit lists."),
|
||||
Long: fmt.Sprintf("Show all unit lists."),
|
||||
DisableFlagParsing: false,
|
||||
DisableFlagsInUseLine: false,
|
||||
PreRunE: cmds.SunGrowArgs,
|
||||
RunE: c.funcMetaList,
|
||||
Args: cobra.MinimumNArgs(0),
|
||||
}
|
||||
cmd.AddCommand(self)
|
||||
self.Example = cmdHelp.PrintExamples(self, "")
|
||||
|
||||
return cmd
|
||||
}
|
||||
func (c *CmdShow) funcMetaList(_ *cobra.Command, _ []string) error {
|
||||
for range Only.Once {
|
||||
c.Error = cmds.Api.SunGrow.MetaUnitList()
|
||||
if c.Error != nil {
|
||||
break
|
||||
}
|
||||
}
|
||||
return c.Error
|
||||
}
|
@ -10,7 +10,7 @@ import (
|
||||
)
|
||||
|
||||
|
||||
func (c *CmdShow) AttachPsId(cmd *cobra.Command) *cobra.Command {
|
||||
func (c *CmdShow) AttachPs(cmd *cobra.Command) *cobra.Command {
|
||||
for range Only.Once {
|
||||
var self = &cobra.Command{
|
||||
Use: "ps",
|
||||
|
@ -59,8 +59,8 @@
|
||||
"fault_alarm_offline_dev_count":0,
|
||||
"fault_count":0,
|
||||
"fault_dev_count":0,
|
||||
"gcj_latitude":"-33.732621764598136",
|
||||
"gcj_longitude":"151.04299275383877",
|
||||
"gcj_latitude":"",
|
||||
"gcj_longitude":"",
|
||||
"gprs_latitude":null,
|
||||
"gprs_longitude":null,
|
||||
"images":[
|
||||
@ -87,11 +87,11 @@
|
||||
"is_bank_ps":0,
|
||||
"is_tuv":0,
|
||||
"join_year_init_elec":0.0000,
|
||||
"latitude":-35.4429347,
|
||||
"latitude":,
|
||||
"location":"HERE",
|
||||
"longitude":147.7136952,
|
||||
"map_latitude":"-35.4429347",
|
||||
"map_longitude":"147.7136952",
|
||||
"longitude":,
|
||||
"map_latitude":"",
|
||||
"map_longitude":"",
|
||||
"mlpe_flag":0,
|
||||
"nmi":"",
|
||||
"offline_dev_count":0,
|
||||
@ -185,9 +185,9 @@
|
||||
"value":"55.2"
|
||||
},
|
||||
"valid_flag":1,
|
||||
"wgs_latitude":-33.73262176459814,
|
||||
"wgs_longitude":151.04299275383877,
|
||||
"zip_code":"2125"
|
||||
"wgs_latitude":,
|
||||
"wgs_longitude":,
|
||||
"zip_code":""
|
||||
}
|
||||
],
|
||||
"rowCount":1
|
||||
|
2
go.mod
2
go.mod
@ -22,7 +22,6 @@ require (
|
||||
github.com/MickMake/GoUnify/cmdHelp v0.0.0-20221125023651-ff4a37b1928a
|
||||
github.com/MickMake/GoUnify/cmdLog v0.0.0-20221125023651-ff4a37b1928a
|
||||
github.com/MickMake/GoUnify/cmdPath v0.0.0-20221125023651-ff4a37b1928a
|
||||
github.com/agrison/go-tablib v0.0.0-20160310143025-4930582c22ee
|
||||
github.com/eclipse/paho.mqtt.golang v1.4.2
|
||||
github.com/go-co-op/gocron v1.18.0
|
||||
github.com/olekukonko/tablewriter v0.0.5
|
||||
@ -47,7 +46,6 @@ require (
|
||||
github.com/agrison/mxj v0.0.0-20160310142625-1269f8afb3b4 // indirect
|
||||
github.com/alecthomas/chroma v0.7.1 // indirect
|
||||
github.com/blang/semver v3.5.1+incompatible // indirect
|
||||
github.com/bndr/gotabulate v1.1.2 // indirect
|
||||
github.com/briandowns/spinner v1.19.0 // indirect
|
||||
github.com/clbanning/mxj v1.8.4 // indirect
|
||||
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 // indirect
|
||||
|
4
go.sum
4
go.sum
@ -70,8 +70,6 @@ github.com/abiosoft/ishell/v2 v2.0.2 h1:5qVfGiQISaYM8TkbBl7RFO6MddABoXpATrsFbVI+
|
||||
github.com/abiosoft/ishell/v2 v2.0.2/go.mod h1:E4oTCXfo6QjoCart0QYa5m9w4S+deXs/P/9jA77A9Bs=
|
||||
github.com/abiosoft/readline v0.0.0-20180607040430-155bce2042db h1:CjPUSXOiYptLbTdr1RceuZgSFDQ7U15ITERUGrUORx8=
|
||||
github.com/abiosoft/readline v0.0.0-20180607040430-155bce2042db/go.mod h1:rB3B4rKii8V21ydCbIzH5hZiCQE7f5E9SzUb/ZZx530=
|
||||
github.com/agrison/go-tablib v0.0.0-20160310143025-4930582c22ee h1:0RklYSvekYaIFI9JUx7TFPQvo++TdILmZiV17QI4nXk=
|
||||
github.com/agrison/go-tablib v0.0.0-20160310143025-4930582c22ee/go.mod h1:M9nmO4lBRWR/bBv7UCOmDJ1MB2DVoqz19B4JchDA+K0=
|
||||
github.com/agrison/mxj v0.0.0-20160310142625-1269f8afb3b4 h1:XBNSe5eibe5Fh131ah+xnO6s4A97U1T3tKZKLQQvqu0=
|
||||
github.com/agrison/mxj v0.0.0-20160310142625-1269f8afb3b4/go.mod h1:n7qJAqL9BKqGqiJyjPbWtxpdswTL5wX0IVP2Uw4vVhQ=
|
||||
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U=
|
||||
@ -85,8 +83,6 @@ github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 h1:p9Sln00KOTlrYkx
|
||||
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ=
|
||||
github.com/blang/semver v3.5.1+incompatible h1:cQNTCjp13qL8KC3Nbxr/y2Bqb63oX6wdnnjpJbkM4JQ=
|
||||
github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk=
|
||||
github.com/bndr/gotabulate v1.1.2 h1:yC9izuZEphojb9r+KYL4W9IJKO/ceIO8HDwxMA24U4c=
|
||||
github.com/bndr/gotabulate v1.1.2/go.mod h1:0+8yUgaPTtLRTjf49E8oju7ojpU11YmXyvq1LbPAb3U=
|
||||
github.com/briandowns/spinner v1.19.0 h1:s8aq38H+Qju89yhp89b4iIiMzMm8YN3p6vGpwyh/a8E=
|
||||
github.com/briandowns/spinner v1.19.0/go.mod h1:mQak9GHqbspjC/5iUx3qMlIho8xBS/ppAL/hX5SmPJU=
|
||||
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
|
||||
|
@ -3,8 +3,8 @@ package exportParamSettingValPDF
|
||||
import (
|
||||
"GoSungrow/iSolarCloud/api"
|
||||
"GoSungrow/iSolarCloud/api/GoStruct"
|
||||
"GoSungrow/iSolarCloud/api/GoStruct/valueTypes"
|
||||
|
||||
"github.com/MickMake/GoUnify/Only"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
@ -13,6 +13,7 @@ const Disabled = false
|
||||
const EndPointName = "AppService.exportParamSettingValPDF"
|
||||
|
||||
type RequestData struct {
|
||||
TaskId valueTypes.String `json:"task_id" required:"true"`
|
||||
}
|
||||
|
||||
func (rd RequestData) IsValid() error {
|
||||
@ -25,8 +26,9 @@ func (rd RequestData) Help() string {
|
||||
}
|
||||
|
||||
|
||||
type ResultData struct {
|
||||
// Dummy valueTypes.String `json:"dummy"`
|
||||
type ResultData struct {
|
||||
Code valueTypes.Integer `json:"code"`
|
||||
ReturnValPdfURL valueTypes.String `json:"return_val_pdf_url"`
|
||||
}
|
||||
|
||||
func (e *ResultData) IsValid() error {
|
||||
@ -34,31 +36,8 @@ func (e *ResultData) IsValid() error {
|
||||
return err
|
||||
}
|
||||
|
||||
//type DecodeResultData ResultData
|
||||
//
|
||||
//func (e *ResultData) UnmarshalJSON(data []byte) error {
|
||||
// var err error
|
||||
//
|
||||
// for range Only.Once {
|
||||
// if len(data) == 0 {
|
||||
// break
|
||||
// }
|
||||
// var pd DecodeResultData
|
||||
//
|
||||
// // Store ResultData
|
||||
// _ = json.Unmarshal(data, &pd)
|
||||
// e.Dummy = pd.Dummy
|
||||
// }
|
||||
//
|
||||
// return err
|
||||
//}
|
||||
|
||||
func (e *EndPoint) GetData() api.DataMap {
|
||||
entries := api.NewDataMap()
|
||||
|
||||
for range Only.Once {
|
||||
entries.StructToDataMap(*e, "", GoStruct.EndPointPath{})
|
||||
}
|
||||
|
||||
entries.StructToDataMap(*e, "", GoStruct.EndPointPath{})
|
||||
return entries
|
||||
}
|
||||
|
@ -3,8 +3,8 @@ package exportPlantReportPDF
|
||||
import (
|
||||
"GoSungrow/iSolarCloud/api"
|
||||
"GoSungrow/iSolarCloud/api/GoStruct"
|
||||
"GoSungrow/iSolarCloud/api/GoStruct/valueTypes"
|
||||
|
||||
"github.com/MickMake/GoUnify/Only"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
@ -13,6 +13,7 @@ const Disabled = false
|
||||
const EndPointName = "AppService.exportPlantReportPDF"
|
||||
|
||||
type RequestData struct {
|
||||
PsId valueTypes.PsId `json:"ps_id" required:"true"`
|
||||
}
|
||||
|
||||
func (rd RequestData) IsValid() error {
|
||||
@ -25,8 +26,8 @@ func (rd RequestData) Help() string {
|
||||
}
|
||||
|
||||
|
||||
type ResultData struct {
|
||||
// Dummy valueTypes.String `json:"dummy"`
|
||||
type ResultData struct {
|
||||
Code valueTypes.String `json:"code"`
|
||||
}
|
||||
|
||||
func (e *ResultData) IsValid() error {
|
||||
@ -34,31 +35,8 @@ func (e *ResultData) IsValid() error {
|
||||
return err
|
||||
}
|
||||
|
||||
//type DecodeResultData ResultData
|
||||
//
|
||||
//func (e *ResultData) UnmarshalJSON(data []byte) error {
|
||||
// var err error
|
||||
//
|
||||
// for range Only.Once {
|
||||
// if len(data) == 0 {
|
||||
// break
|
||||
// }
|
||||
// var pd DecodeResultData
|
||||
//
|
||||
// // Store ResultData
|
||||
// _ = json.Unmarshal(data, &pd)
|
||||
// e.Dummy = pd.Dummy
|
||||
// }
|
||||
//
|
||||
// return err
|
||||
//}
|
||||
|
||||
func (e *EndPoint) GetData() api.DataMap {
|
||||
entries := api.NewDataMap()
|
||||
|
||||
for range Only.Once {
|
||||
entries.StructToDataMap(*e, "", GoStruct.EndPointPath{})
|
||||
}
|
||||
|
||||
entries.StructToDataMap(*e, "", GoStruct.EndPointPath{})
|
||||
return entries
|
||||
}
|
||||
|
@ -4,7 +4,6 @@ import (
|
||||
"GoSungrow/iSolarCloud/api"
|
||||
"GoSungrow/iSolarCloud/api/GoStruct"
|
||||
|
||||
"github.com/MickMake/GoUnify/Only"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
@ -25,8 +24,7 @@ func (rd RequestData) Help() string {
|
||||
}
|
||||
|
||||
|
||||
type ResultData struct {
|
||||
// Dummy valueTypes.String `json:"dummy"`
|
||||
type ResultData []struct {
|
||||
}
|
||||
|
||||
func (e *ResultData) IsValid() error {
|
||||
@ -34,31 +32,9 @@ func (e *ResultData) IsValid() error {
|
||||
return err
|
||||
}
|
||||
|
||||
//type DecodeResultData ResultData
|
||||
//
|
||||
//func (e *ResultData) UnmarshalJSON(data []byte) error {
|
||||
// var err error
|
||||
//
|
||||
// for range Only.Once {
|
||||
// if len(data) == 0 {
|
||||
// break
|
||||
// }
|
||||
// var pd DecodeResultData
|
||||
//
|
||||
// // Store ResultData
|
||||
// _ = json.Unmarshal(data, &pd)
|
||||
// e.Dummy = pd.Dummy
|
||||
// }
|
||||
//
|
||||
// return err
|
||||
//}
|
||||
|
||||
func (e *EndPoint) GetData() api.DataMap {
|
||||
entries := api.NewDataMap()
|
||||
|
||||
for range Only.Once {
|
||||
entries.StructToDataMap(*e, "", GoStruct.EndPointPath{})
|
||||
}
|
||||
|
||||
entries.StructToDataMap(*e, "", GoStruct.EndPointPath{})
|
||||
return entries
|
||||
}
|
||||
|
@ -4,7 +4,6 @@ import (
|
||||
"GoSungrow/iSolarCloud/api"
|
||||
"GoSungrow/iSolarCloud/api/GoStruct"
|
||||
"GoSungrow/iSolarCloud/api/GoStruct/valueTypes"
|
||||
"github.com/MickMake/GoUnify/Only"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
@ -13,9 +12,9 @@ const Disabled = false
|
||||
const EndPointName = "AppService.getPowerDeviceSetTaskDetailList"
|
||||
|
||||
type RequestData struct {
|
||||
QueryType valueTypes.String `json:"query_type" required:"true"`
|
||||
TaskId valueTypes.String `json:"task_id" required:"true"`
|
||||
Uuid valueTypes.String `json:"uuid" required:"true"`
|
||||
QueryType valueTypes.String `json:"query_type" required:"true"`
|
||||
TaskId valueTypes.String `json:"task_id" required:"true"`
|
||||
Uuid valueTypes.Integer `json:"uuid" required:"true"`
|
||||
}
|
||||
|
||||
func (rd RequestData) IsValid() error {
|
||||
@ -28,48 +27,119 @@ func (rd RequestData) Help() string {
|
||||
}
|
||||
|
||||
type ResultData struct {
|
||||
DeviceList []interface{} `json:"device_list"`
|
||||
PageList []interface{} `json:"pageList" PointId:"page_list" PointIdReplace:"true" PointArrayFlatten:"false"`
|
||||
PsNameInfoList []interface{} `json:"ps_name_info_list"`
|
||||
RowCount valueTypes.Integer `json:"rowCount" PointId:"row_count"`
|
||||
DeviceList []struct {
|
||||
ChannelId valueTypes.Integer `json:"chnnl_id" PointId:"channel_id"`
|
||||
CountryId valueTypes.Integer `json:"country_id"`
|
||||
DataFlagDetail valueTypes.Integer `json:"data_flag_detail"`
|
||||
DeviceArea valueTypes.String `json:"device_area"`
|
||||
DeviceAreaName valueTypes.String `json:"device_area_name"`
|
||||
DeviceModel valueTypes.String `json:"device_model"`
|
||||
DeviceModelCode valueTypes.String `json:"device_model_code"`
|
||||
DeviceModelId valueTypes.Integer `json:"device_model_id"`
|
||||
DeviceName valueTypes.String `json:"device_name"`
|
||||
DeviceProSn valueTypes.String `json:"device_pro_sn"`
|
||||
DeviceType valueTypes.Integer `json:"device_type"`
|
||||
GridTypeId valueTypes.Integer `json:"grid_type_id"`
|
||||
PsName valueTypes.String `json:"ps_name"`
|
||||
PsShortName valueTypes.String `json:"ps_short_name"`
|
||||
Sn valueTypes.String `json:"sn"`
|
||||
|
||||
IsHaveversion valueTypes.Bool `json:"is_haveversion"`
|
||||
BatVersion valueTypes.String `json:"bat_version"`
|
||||
LcdVersion valueTypes.String `json:"lcd_version"`
|
||||
MachineVersion valueTypes.String `json:"machine_version"`
|
||||
MdspVersion valueTypes.String `json:"mdsp_version"`
|
||||
SdspVersion valueTypes.String `json:"sdsp_version"`
|
||||
Version1 valueTypes.String `json:"version1"`
|
||||
Version2 valueTypes.String `json:"version2"`
|
||||
Version3 valueTypes.String `json:"version3"`
|
||||
Version4 valueTypes.String `json:"version4"`
|
||||
Version5 valueTypes.String `json:"version5"`
|
||||
Version6 valueTypes.String `json:"version6"`
|
||||
Version7 valueTypes.String `json:"version7"`
|
||||
Version8 valueTypes.String `json:"version8"`
|
||||
Version9 valueTypes.String `json:"version9"`
|
||||
Version10 valueTypes.String `json:"version10"`
|
||||
Version11 valueTypes.String `json:"version11"`
|
||||
Version12 valueTypes.String `json:"version12"`
|
||||
} `json:"device_list" DataTable:"true" DataTableIndex:"true"`
|
||||
PageList []struct {
|
||||
// GoStruct.GoStructParent `json:"-" DataTable:"true" DataTableSortOn:"PsId"`
|
||||
GoStruct.GoStruct `json:"-" PointIdFrom:"PsId" PointIdReplace:"true"`
|
||||
|
||||
PsId valueTypes.Integer `json:"ps_id"`
|
||||
PsName valueTypes.String `json:"ps_name"`
|
||||
ChannelId valueTypes.Integer `json:"chnnl_id" PointId:"channel_id"`
|
||||
DeviceType valueTypes.Integer `json:"device_type"`
|
||||
Sn valueTypes.String `json:"sn"`
|
||||
|
||||
PointId valueTypes.Integer `json:"point_id"`
|
||||
PointIdType valueTypes.Integer `json:"point_id_type"`
|
||||
PointName valueTypes.String `json:"point_name"`
|
||||
Unit valueTypes.String `json:"unit"`
|
||||
|
||||
CommandStatus valueTypes.Integer `json:"command_status"`
|
||||
CountryId valueTypes.Integer `json:"country_id"`
|
||||
CreateTime valueTypes.DateTime `json:"create_time"`
|
||||
DataFlagDetail valueTypes.Integer `json:"data_flag_detail"`
|
||||
DeviceArea valueTypes.String `json:"device_area"`
|
||||
DeviceAreaName valueTypes.String `json:"device_area_name"`
|
||||
DeviceModel valueTypes.String `json:"device_model"`
|
||||
DeviceModelCode valueTypes.String `json:"device_model_code"`
|
||||
DeviceModelId valueTypes.Integer `json:"device_model_id"`
|
||||
DeviceName valueTypes.String `json:"device_name"`
|
||||
DeviceProSn valueTypes.String `json:"device_pro_sn"`
|
||||
DeviceUUID valueTypes.Integer `json:"device_uuid"`
|
||||
GridTypeId valueTypes.Integer `json:"grid_type_id"`
|
||||
ModbusAddress valueTypes.String `json:"modbus_address"`
|
||||
Module valueTypes.Integer `json:"module"`
|
||||
PsShortName valueTypes.String `json:"ps_short_name"`
|
||||
ReturnValue valueTypes.String `json:"return_value"`
|
||||
SetPrecision valueTypes.String `json:"set_precision"`
|
||||
SetValName valueTypes.String `json:"set_val_name"`
|
||||
SetValNameVal valueTypes.String `json:"set_val_name_val"`
|
||||
SetValue valueTypes.String `json:"set_value"`
|
||||
TaskDetailCommandType valueTypes.String `json:"task_detail_command_type"`
|
||||
TaskDetailId valueTypes.Integer `json:"task_detail_id"`
|
||||
TaskId valueTypes.Integer `json:"task_id"`
|
||||
TaskType valueTypes.Integer `json:"task_type"`
|
||||
UpdateTime valueTypes.DateTime `json:"update_time"`
|
||||
ValType valueTypes.String `json:"val_type"`
|
||||
|
||||
IsHaveversion valueTypes.Bool `json:"is_haveversion"`
|
||||
BatVersion valueTypes.String `json:"bat_version"`
|
||||
LcdVersion valueTypes.String `json:"lcd_version"`
|
||||
MachineVersion valueTypes.String `json:"machine_version"`
|
||||
MdspVersion valueTypes.String `json:"mdsp_version"`
|
||||
SdspVersion valueTypes.String `json:"sdsp_version"`
|
||||
Version1 valueTypes.String `json:"version1"`
|
||||
Version2 valueTypes.String `json:"version2"`
|
||||
Version3 valueTypes.String `json:"version3"`
|
||||
Version4 valueTypes.String `json:"version4"`
|
||||
Version5 valueTypes.String `json:"version5"`
|
||||
Version6 valueTypes.String `json:"version6"`
|
||||
Version7 valueTypes.String `json:"version7"`
|
||||
Version8 valueTypes.String `json:"version8"`
|
||||
Version9 valueTypes.String `json:"version9"`
|
||||
Version10 valueTypes.String `json:"version10"`
|
||||
Version11 valueTypes.String `json:"version11"`
|
||||
Version12 valueTypes.String `json:"version12"`
|
||||
} `json:"pageList" PointId:"devices" PointIdReplace:"true"`
|
||||
PsNameInfoList []struct {
|
||||
PsId valueTypes.Integer `json:"ps_id"`
|
||||
PsName valueTypes.String `json:"ps_name"`
|
||||
PsShortName valueTypes.String `json:"ps_short_name"`
|
||||
} `json:"ps_name_info_list" DataTable:"true" DataTableIndex:"true"`
|
||||
RowCount valueTypes.Integer `json:"rowCount" PointId:"row_count"`
|
||||
}
|
||||
|
||||
func (e *ResultData) IsValid() error {
|
||||
var err error
|
||||
// switch {
|
||||
// case e.Dummy == "":
|
||||
// break
|
||||
// default:
|
||||
// err = errors.New(fmt.Sprintf("unknown error '%s'", e.Dummy))
|
||||
// }
|
||||
return err
|
||||
}
|
||||
|
||||
// type DecodeResultData ResultData
|
||||
//
|
||||
// func (e *ResultData) UnmarshalJSON(data []byte) error {
|
||||
// var err error
|
||||
//
|
||||
// for range Only.Once {
|
||||
// if len(data) == 0 {
|
||||
// break
|
||||
// }
|
||||
// var pd DecodeResultData
|
||||
//
|
||||
// // Store ResultData
|
||||
// _ = json.Unmarshal(data, &pd)
|
||||
// e.Dummy = pd.Dummy
|
||||
// }
|
||||
//
|
||||
// return err
|
||||
// }
|
||||
|
||||
func (e *EndPoint) GetData() api.DataMap {
|
||||
entries := api.NewDataMap()
|
||||
|
||||
for range Only.Once {
|
||||
entries.StructToDataMap(*e, "", GoStruct.EndPointPath{})
|
||||
}
|
||||
|
||||
entries.StructToDataMap(*e, "", GoStruct.EndPointPath{})
|
||||
return entries
|
||||
}
|
||||
|
@ -4,7 +4,6 @@ import (
|
||||
"GoSungrow/iSolarCloud/api"
|
||||
"GoSungrow/iSolarCloud/api/GoStruct"
|
||||
"GoSungrow/iSolarCloud/api/GoStruct/valueTypes"
|
||||
"github.com/MickMake/GoUnify/Only"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
@ -25,46 +24,17 @@ func (rd RequestData) Help() string {
|
||||
}
|
||||
|
||||
type ResultData struct {
|
||||
PageList interface{} `json:"pageList" PointId:"page_list" PointIdReplace:"true" PointArrayFlatten:"false"`
|
||||
PageList interface{} `json:"pageList" PointId:"page_list" PointIdReplace:"true" PointArrayFlatten:"false"`
|
||||
RowCount valueTypes.Integer `json:"rowCount" PointId:"row_count"`
|
||||
}
|
||||
|
||||
func (e *ResultData) IsValid() error {
|
||||
var err error
|
||||
// switch {
|
||||
// case e.Dummy == "":
|
||||
// break
|
||||
// default:
|
||||
// err = errors.New(fmt.Sprintf("unknown error '%s'", e.Dummy))
|
||||
// }
|
||||
return err
|
||||
}
|
||||
|
||||
// type DecodeResultData ResultData
|
||||
//
|
||||
// func (e *ResultData) UnmarshalJSON(data []byte) error {
|
||||
// var err error
|
||||
//
|
||||
// for range Only.Once {
|
||||
// if len(data) == 0 {
|
||||
// break
|
||||
// }
|
||||
// var pd DecodeResultData
|
||||
//
|
||||
// // Store ResultData
|
||||
// _ = json.Unmarshal(data, &pd)
|
||||
// e.Dummy = pd.Dummy
|
||||
// }
|
||||
//
|
||||
// return err
|
||||
// }
|
||||
|
||||
func (e *EndPoint) GetData() api.DataMap {
|
||||
entries := api.NewDataMap()
|
||||
|
||||
for range Only.Once {
|
||||
entries.StructToDataMap(*e, "", GoStruct.EndPointPath{})
|
||||
}
|
||||
|
||||
entries.StructToDataMap(*e, "", GoStruct.EndPointPath{})
|
||||
return entries
|
||||
}
|
||||
|
@ -27,12 +27,13 @@ func (rd RequestData) Help() string {
|
||||
type ResultData struct {
|
||||
PointTimeRelation []struct {
|
||||
UpTimePointId valueTypes.Integer `json:"up_time_point_id"`
|
||||
Is24Hour valueTypes.Bool `json:"is_24_hour"`
|
||||
|
||||
PointList []struct {
|
||||
PointId valueTypes.Integer `json:"point_id"`
|
||||
TimeType valueTypes.Integer `json:"time_type"`
|
||||
} `json:"point_list"` // DataTable:"true" DataTablePivot:"true"`
|
||||
Is24Hour valueTypes.Bool `json:"is_24_hour"`
|
||||
} `json:"point_time_relation" DataTable:"true"`
|
||||
} `json:"point_list" DataTable:"true"` // DataTablePivot:"true"`
|
||||
} `json:"point_time_relation"` // DataTable:"true"`
|
||||
}
|
||||
|
||||
func (e *ResultData) IsValid() error {
|
||||
|
@ -4,7 +4,6 @@ import (
|
||||
"GoSungrow/iSolarCloud/api"
|
||||
"GoSungrow/iSolarCloud/api/GoStruct"
|
||||
"GoSungrow/iSolarCloud/api/GoStruct/valueTypes"
|
||||
"github.com/MickMake/GoUnify/Only"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
@ -13,8 +12,8 @@ const Disabled = false
|
||||
const EndPointName = "AppService.queryParamSettingTask"
|
||||
|
||||
type RequestData struct {
|
||||
TaskId valueTypes.String `json:"task_id" required:"true"`
|
||||
Uuid valueTypes.String `json:"uuid" required:"true"`
|
||||
TaskId valueTypes.String `json:"task_id" required:"true"`
|
||||
Uuid valueTypes.Integer `json:"uuid" required:"true"`
|
||||
}
|
||||
|
||||
func (rd RequestData) IsValid() error {
|
||||
@ -27,45 +26,41 @@ func (rd RequestData) Help() string {
|
||||
}
|
||||
|
||||
type ResultData struct {
|
||||
// Dummy valueTypes.String `json:"dummy"`
|
||||
TaskId valueTypes.Integer `json:"task_id"`
|
||||
TaskName valueTypes.String `json:"task_name"`
|
||||
CommandStatus valueTypes.Integer `json:"command_status"`
|
||||
CreateTime valueTypes.DateTime `json:"create_time" PointNameDateFormat:"2006-01-02 15:04:05"`
|
||||
CreateTimeZone valueTypes.DateTime `json:"create_time_zone" PointNameDateFormat:"2006-01-02 15:04:05"`
|
||||
OverTime valueTypes.DateTime `json:"over_time" PointNameDateFormat:"2006-01-02 15:04:05"`
|
||||
OverTimeZone valueTypes.DateTime `json:"over_time_zone" PointNameDateFormat:"2006-01-02 15:04:05"`
|
||||
|
||||
ParamList []struct {
|
||||
// GoStruct.GoStructParent `json:"-" DataTable:"true" DataTableIndex:"true"`
|
||||
|
||||
PointId valueTypes.PointId `json:"point_id"`
|
||||
PointName valueTypes.String `json:"point_name"`
|
||||
Unit valueTypes.String `json:"unit"`
|
||||
SetValue valueTypes.String `json:"set_value"`
|
||||
SetPrecision valueTypes.String `json:"set_precision"`
|
||||
ReturnValue valueTypes.String `json:"return_value"`
|
||||
SetValName valueTypes.String `json:"set_val_name"`
|
||||
SetValNameVal valueTypes.String `json:"set_val_name_val"`
|
||||
CommandStatus valueTypes.Integer `json:"command_status"`
|
||||
CreateTime valueTypes.DateTime `json:"create_time" PointNameDateFormat:"2006-01-02 15:04:05"`
|
||||
CreateTimeZone valueTypes.DateTime `json:"create_time_zone" PointNameDateFormat:"2006-01-02 15:04:05"`
|
||||
UpdateTime valueTypes.DateTime `json:"update_time" PointNameDateFormat:"2006-01-02 15:04:05"`
|
||||
UpdateTimeZone valueTypes.DateTime `json:"update_time_zone" PointNameDateFormat:"2006-01-02 15:04:05"`
|
||||
ParamCode valueTypes.String `json:"param_code"`
|
||||
} `json:"param_list" DataTable:"true" DataTableIndex:"true"`
|
||||
}
|
||||
|
||||
func (e *ResultData) IsValid() error {
|
||||
var err error
|
||||
// switch {
|
||||
// case e.Dummy == "":
|
||||
// break
|
||||
// default:
|
||||
// err = errors.New(fmt.Sprintf("unknown error '%s'", e.Dummy))
|
||||
// }
|
||||
return err
|
||||
}
|
||||
|
||||
// type DecodeResultData ResultData
|
||||
//
|
||||
// func (e *ResultData) UnmarshalJSON(data []byte) error {
|
||||
// var err error
|
||||
//
|
||||
// for range Only.Once {
|
||||
// if len(data) == 0 {
|
||||
// break
|
||||
// }
|
||||
// var pd DecodeResultData
|
||||
//
|
||||
// // Store ResultData
|
||||
// _ = json.Unmarshal(data, &pd)
|
||||
// e.Dummy = pd.Dummy
|
||||
// }
|
||||
//
|
||||
// return err
|
||||
// }
|
||||
|
||||
func (e *EndPoint) GetData() api.DataMap {
|
||||
entries := api.NewDataMap()
|
||||
|
||||
for range Only.Once {
|
||||
entries.StructToDataMap(*e, "", GoStruct.EndPointPath{})
|
||||
}
|
||||
|
||||
entries.StructToDataMap(*e, "", GoStruct.EndPointPath{})
|
||||
return entries
|
||||
}
|
||||
|
@ -3,8 +3,8 @@ package getLoadCurveList
|
||||
import (
|
||||
"GoSungrow/iSolarCloud/api"
|
||||
"GoSungrow/iSolarCloud/api/GoStruct"
|
||||
"GoSungrow/iSolarCloud/api/GoStruct/valueTypes"
|
||||
|
||||
"github.com/MickMake/GoUnify/Only"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
@ -13,6 +13,8 @@ const Disabled = false
|
||||
const EndPointName = "WebAppService.getLoadCurveList"
|
||||
|
||||
type RequestData struct {
|
||||
PsId valueTypes.PsId `json:"ps_id" required:"true"`
|
||||
MonthDate2 valueTypes.Integer `json:"monthDate" required:"true"`
|
||||
}
|
||||
|
||||
func (rd RequestData) IsValid() error {
|
||||
@ -24,9 +26,9 @@ func (rd RequestData) Help() string {
|
||||
return ret
|
||||
}
|
||||
|
||||
|
||||
type ResultData struct {
|
||||
// Dummy valueTypes.String `json:"dummy"`
|
||||
type ResultData struct {
|
||||
DayList []interface{} `json:"dayList"`
|
||||
PsKey valueTypes.String `json:"psKey"`
|
||||
}
|
||||
|
||||
func (e *ResultData) IsValid() error {
|
||||
@ -34,31 +36,8 @@ func (e *ResultData) IsValid() error {
|
||||
return err
|
||||
}
|
||||
|
||||
//type DecodeResultData ResultData
|
||||
//
|
||||
//func (e *ResultData) UnmarshalJSON(data []byte) error {
|
||||
// var err error
|
||||
//
|
||||
// for range Only.Once {
|
||||
// if len(data) == 0 {
|
||||
// break
|
||||
// }
|
||||
// var pd DecodeResultData
|
||||
//
|
||||
// // Store ResultData
|
||||
// _ = json.Unmarshal(data, &pd)
|
||||
// e.Dummy = pd.Dummy
|
||||
// }
|
||||
//
|
||||
// return err
|
||||
//}
|
||||
|
||||
func (e *EndPoint) GetData() api.DataMap {
|
||||
entries := api.NewDataMap()
|
||||
|
||||
for range Only.Once {
|
||||
entries.StructToDataMap(*e, "", GoStruct.EndPointPath{})
|
||||
}
|
||||
|
||||
entries.StructToDataMap(*e, "", GoStruct.EndPointPath{})
|
||||
return entries
|
||||
}
|
||||
|
@ -2,10 +2,10 @@ package output
|
||||
|
||||
import (
|
||||
"GoSungrow/iSolarCloud/api/GoStruct/gojson"
|
||||
"GoSungrow/tablib"
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/MickMake/GoUnify/Only"
|
||||
tabular "github.com/agrison/go-tablib"
|
||||
"os"
|
||||
"reflect"
|
||||
"sort"
|
||||
@ -36,40 +36,113 @@ func (t *Tables) Sort() []string {
|
||||
|
||||
|
||||
type Table struct {
|
||||
name string
|
||||
filePrefix string
|
||||
title string
|
||||
table *tabular.Dataset
|
||||
graph *Chart
|
||||
json []byte
|
||||
raw []byte
|
||||
OutputType OutputType
|
||||
saveAsFile bool
|
||||
name string
|
||||
filePrefix string
|
||||
title string
|
||||
graph *Chart
|
||||
json []byte
|
||||
raw []byte
|
||||
OutputType OutputType
|
||||
saveAsFile bool
|
||||
graphFilter string
|
||||
Error error
|
||||
Error error
|
||||
|
||||
tablib *tablib.Dataset
|
||||
// tabular tabular.RenderTable
|
||||
// tablewriter *tablewriter.Table
|
||||
// buf *bytes.Buffer
|
||||
// headers []string
|
||||
// method int8
|
||||
}
|
||||
|
||||
func NewTable(headers ...string) Table {
|
||||
return Table {
|
||||
filePrefix: "",
|
||||
title: "",
|
||||
table: tabular.NewDataset(headers),
|
||||
Error: nil,
|
||||
// buf := new(bytes.Buffer)
|
||||
t := Table {
|
||||
filePrefix: "",
|
||||
title: "",
|
||||
tablib: tablib.NewDataset(headers),
|
||||
// tabular: tabular.New("utf8-heavy"),
|
||||
// tablewriter: tablewriter.NewWriter(buf),
|
||||
// buf: buf,
|
||||
// headers: []string{},
|
||||
// method: MethodTablib,
|
||||
|
||||
Error: nil,
|
||||
}
|
||||
|
||||
t.tablib.SetAlign(tablib.AlignLeft)
|
||||
t.tablib.SetEmptyString(" ")
|
||||
t.tablib.SetMaxCellSize(128)
|
||||
t.tablib.SetWrapStrings(true)
|
||||
t.tablib.SetFloatFormat('f')
|
||||
t.tablib.SetWrapDelimiter(' ')
|
||||
t.tablib.SetDenseMode(true)
|
||||
t.tablib.SetSplitConcat(" ")
|
||||
|
||||
// var h1 []interface{}
|
||||
// for _, h2 := range headers {
|
||||
// h1 = append(h1, h2)
|
||||
// }
|
||||
// t.tabular.AddHeaders(h1...)
|
||||
// t.tablewriter.SetHeader(headers)
|
||||
// t.headers = headers
|
||||
return t
|
||||
}
|
||||
|
||||
func (t *Table) String() string {
|
||||
// const (
|
||||
// MethodTablib = iota
|
||||
// MethodTabular = iota
|
||||
// MethodTableWriter = iota
|
||||
// )
|
||||
// func (t *Table) SetTablib() {
|
||||
// t.method = MethodTablib
|
||||
// }
|
||||
// func (t *Table) Tablib() *tablib.Dataset {
|
||||
// return t.tablib
|
||||
// }
|
||||
//
|
||||
// func (t *Table) SetTabular() {
|
||||
// t.method = MethodTabular
|
||||
// }
|
||||
// func (t *Table) Tabular() tabular.RenderTable {
|
||||
// return t.tabular
|
||||
// }
|
||||
//
|
||||
// func (t *Table) SetTableWriter() {
|
||||
// t.method = MethodTableWriter
|
||||
// }
|
||||
// func (t *Table) TableWriter() *tablewriter.Table {
|
||||
// return t.tablewriter
|
||||
// }
|
||||
|
||||
func (t Table) String() string {
|
||||
var ret string
|
||||
for range Only.Once {
|
||||
if t == nil {
|
||||
break
|
||||
}
|
||||
// switch t.method {
|
||||
// case 2:
|
||||
// // Example: GoSungrow api ls endpoints / GoSungrow api ls areas
|
||||
// if t.buf == nil {
|
||||
// break
|
||||
// }
|
||||
// t.tablewriter.SetBorder(true)
|
||||
// t.tablewriter.Render()
|
||||
// ret = t.buf.String()
|
||||
// t.buf = nil
|
||||
//
|
||||
// case 1:
|
||||
// ret, t.Error = t.tabular.Render()
|
||||
// // t.Error = tabular.RenderTo(t.tabular, ret, "") // "csv", "html", "json", "markdown"
|
||||
//
|
||||
// case 0:
|
||||
// fallthrough
|
||||
// default:
|
||||
// if !t.tablib.Valid() {
|
||||
// break
|
||||
// }
|
||||
// ret = t.tablib.Tabular("utf8").String()
|
||||
// }
|
||||
|
||||
if !t.table.Valid() {
|
||||
break
|
||||
}
|
||||
|
||||
ret = t.table.Tabular("condensed").String()
|
||||
ret = t.tablib.Tabular("utf8").String()
|
||||
}
|
||||
return ret
|
||||
}
|
||||
@ -77,16 +150,16 @@ func (t *Table) String() string {
|
||||
func (t *Table) IsValid() bool {
|
||||
var yes bool
|
||||
for range Only.Once {
|
||||
if t.table == nil {
|
||||
if t.tablib == nil {
|
||||
break
|
||||
}
|
||||
if !t.table.Valid() {
|
||||
if !t.tablib.Valid() {
|
||||
break
|
||||
}
|
||||
if t.table.Height() == 0 {
|
||||
if t.tablib.Height() == 0 {
|
||||
break
|
||||
}
|
||||
if t.table.Width() == 0 {
|
||||
if t.tablib.Width() == 0 {
|
||||
break
|
||||
}
|
||||
yes = true
|
||||
@ -99,16 +172,41 @@ func (t *Table) IsNotValid() bool {
|
||||
}
|
||||
|
||||
func (t *Table) GetHeaders() []string {
|
||||
return t.table.Headers()
|
||||
var ret []string
|
||||
for range Only.Once {
|
||||
// switch t.method {
|
||||
// case 2:
|
||||
// if t.buf == nil {
|
||||
// break
|
||||
// }
|
||||
// ret = t.headers
|
||||
//
|
||||
// case 1:
|
||||
// for _, r2 := range t.tabular.Headers() {
|
||||
// ret = append(ret, r2.String())
|
||||
// }
|
||||
//
|
||||
// case 0:
|
||||
// fallthrough
|
||||
// default:
|
||||
// if !t.tablib.Valid() {
|
||||
// break
|
||||
// }
|
||||
// ret = t.tablib.Headers()
|
||||
// }
|
||||
|
||||
if !t.tablib.Valid() {
|
||||
break
|
||||
}
|
||||
ret = t.tablib.Headers()
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
func (t *Table) GetSortedHeaders() []string {
|
||||
var sorted []string
|
||||
|
||||
for range Only.Once {
|
||||
for _, h := range t.table.Headers() {
|
||||
sorted = append(sorted, h)
|
||||
}
|
||||
sorted = t.GetHeaders()
|
||||
sort.Strings(sorted)
|
||||
}
|
||||
return sorted
|
||||
@ -120,10 +218,31 @@ func (t *Table) Sort(sort string) {
|
||||
break
|
||||
}
|
||||
|
||||
// switch t.method {
|
||||
// case 2:
|
||||
// // @TODO -
|
||||
// // t.tablewriter.???()
|
||||
//
|
||||
// case 1:
|
||||
// // @TODO -
|
||||
// // t.tabular.???()
|
||||
//
|
||||
// case 0:
|
||||
// fallthrough
|
||||
// default:
|
||||
// // Make sure we have a header.
|
||||
// for _, header := range t.tablib.Headers() {
|
||||
// if header == sort {
|
||||
// t.tablib = t.tablib.Sort(sort)
|
||||
// break
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// Make sure we have a header.
|
||||
for _, header := range t.table.Headers() {
|
||||
|
||||
for _, header := range t.tablib.Headers() {
|
||||
if header == sort {
|
||||
t.table = t.table.Sort(sort)
|
||||
t.tablib = t.tablib.Sort(sort)
|
||||
break
|
||||
}
|
||||
}
|
||||
@ -131,18 +250,67 @@ func (t *Table) Sort(sort string) {
|
||||
}
|
||||
|
||||
func (t *Table) RowLength() int {
|
||||
return t.table.Height()
|
||||
// var ret int
|
||||
// switch t.method {
|
||||
// case 2:
|
||||
// ret = t.tablewriter.NumLines()
|
||||
//
|
||||
// case 1:
|
||||
// ret = t.tabular.NRows()
|
||||
//
|
||||
// case 0:
|
||||
// fallthrough
|
||||
// default:
|
||||
// ret = t.tablib.Height()
|
||||
// }
|
||||
// return ret
|
||||
|
||||
return t.tablib.Height()
|
||||
}
|
||||
|
||||
func (t *Table) GetCell(row int, colName string) (string, interface{}, error) {
|
||||
var ret interface{}
|
||||
var retType string
|
||||
for range Only.Once {
|
||||
// switch t.method {
|
||||
// case 2:
|
||||
// // @TODO -
|
||||
// // t.tablewriter.??(row)
|
||||
// fmt.Println("Not supported.")
|
||||
//
|
||||
// case 1:
|
||||
// // @TODO -
|
||||
// var h string
|
||||
// var i int
|
||||
// for i, h = range t.headers {
|
||||
// if h == colName {
|
||||
// break
|
||||
// }
|
||||
// }
|
||||
// c, err := t.tabular.CellAt(t2.CellLocation {
|
||||
// Row: row,
|
||||
// Column: i,
|
||||
// })
|
||||
// if err != nil {
|
||||
// t.Error = err
|
||||
// break
|
||||
// }
|
||||
// ret = c.Item()
|
||||
//
|
||||
// case 0:
|
||||
// fallthrough
|
||||
// default:
|
||||
// var r map[string]interface{}
|
||||
// r, t.Error = t.tablib.Row(row)
|
||||
// ret = r[colName]
|
||||
// }
|
||||
|
||||
var r map[string]interface{}
|
||||
r, t.Error = t.table.Row(row)
|
||||
r, t.Error = t.tablib.Row(row)
|
||||
if t.Error != nil {
|
||||
break
|
||||
}
|
||||
|
||||
ret = r[colName]
|
||||
// retType = reflect.TypeOf(ret).String()
|
||||
retType = reflect.TypeOf(ret).Name()
|
||||
@ -151,8 +319,25 @@ func (t *Table) GetCell(row int, colName string) (string, interface{}, error) {
|
||||
}
|
||||
|
||||
func (t *Table) AddRow(row ...interface{}) error {
|
||||
t.Error = t.table.Append(row)
|
||||
return t.Error
|
||||
// switch t.method {
|
||||
// case 2:
|
||||
// var ra []string
|
||||
// for _, r := range row {
|
||||
// ra = append(ra, fmt.Sprintf("%s", r))
|
||||
// }
|
||||
// t.tablewriter.Append(ra)
|
||||
//
|
||||
// case 1:
|
||||
// t.tabular.AddRowItems(row...)
|
||||
//
|
||||
// case 0:
|
||||
// fallthrough
|
||||
// default:
|
||||
// t.Error = t.tablib.Append(row)
|
||||
// }
|
||||
// return t.Error
|
||||
|
||||
return t.tablib.Append(row)
|
||||
}
|
||||
|
||||
func (t *Table) writeFile(data string, perm os.FileMode) error {
|
||||
@ -354,8 +539,8 @@ func (t *Table) AsCsv() string {
|
||||
break
|
||||
}
|
||||
|
||||
var result *tabular.Exportable
|
||||
result, t.Error = t.table.CSV()
|
||||
var result *tablib.Exportable
|
||||
result, t.Error = t.tablib.CSV()
|
||||
if t.Error != nil {
|
||||
break
|
||||
}
|
||||
@ -396,8 +581,8 @@ func (t *Table) AsXml() string {
|
||||
break
|
||||
}
|
||||
|
||||
var result *tabular.Exportable
|
||||
result, t.Error = t.table.XML()
|
||||
var result *tablib.Exportable
|
||||
result, t.Error = t.tablib.XML()
|
||||
if t.Error != nil {
|
||||
break
|
||||
}
|
||||
@ -438,8 +623,8 @@ func (t *Table) AsXLSX() string {
|
||||
break
|
||||
}
|
||||
|
||||
var result *tabular.Exportable
|
||||
result, t.Error = t.table.XLSX()
|
||||
var result *tablib.Exportable
|
||||
result, t.Error = t.tablib.XLSX()
|
||||
if t.Error != nil {
|
||||
break
|
||||
}
|
||||
@ -562,8 +747,8 @@ func (t *Table) AsMarkDown() string {
|
||||
break
|
||||
}
|
||||
|
||||
var result *tabular.Exportable
|
||||
result = t.table.Markdown()
|
||||
var result *tablib.Exportable
|
||||
result = t.tablib.Markdown()
|
||||
if t.Error != nil {
|
||||
break
|
||||
}
|
||||
|
@ -1,9 +1,9 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"github.com/MickMake/GoUnify/Only"
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/MickMake/GoUnify/Only"
|
||||
)
|
||||
|
||||
|
||||
@ -49,3 +49,9 @@ func (as AreaStruct) CountEnabled() int {
|
||||
func (as AreaStruct) CountDisabled() int {
|
||||
return len(as.EndPoints.GetDisabled())
|
||||
}
|
||||
|
||||
func (as AreaStruct) CoveragePercent() float64 {
|
||||
d := len(as.EndPoints.GetDisabled())
|
||||
e := len(as.EndPoints.GetEnabled())
|
||||
return (float64(e) / (float64(e) + float64(d))) * 100
|
||||
}
|
||||
|
@ -1,13 +1,12 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"github.com/MickMake/GoUnify/Only"
|
||||
"GoSungrow/iSolarCloud/api/GoStruct/output"
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/olekukonko/tablewriter"
|
||||
"os"
|
||||
"github.com/MickMake/GoUnify/Only"
|
||||
"sort"
|
||||
"strings"
|
||||
)
|
||||
|
||||
|
||||
@ -148,35 +147,122 @@ func (an Areas) ListAreas() {
|
||||
for range Only.Once {
|
||||
fmt.Println("Listing all endpoint areas:")
|
||||
|
||||
table := tablewriter.NewWriter(os.Stdout)
|
||||
table.SetHeader([]string{"Areas", "Enabled EndPoints", "Disabled EndPoints", "Coverage %"})
|
||||
table.SetBorder(true)
|
||||
table := output.NewTable("Areas", "Enabled EndPoints", "Disabled EndPoints", "Coverage %")
|
||||
te := 0
|
||||
td := 0
|
||||
for _, area := range an.SortAreas() {
|
||||
e := an[area].CountEnabled()
|
||||
d := an[area].CountDisabled()
|
||||
p := (float64(e) / float64(d)) * 100
|
||||
table.Append([]string{
|
||||
|
||||
_ = table.AddRow(
|
||||
string(area),
|
||||
fmt.Sprintf("%d", e),
|
||||
fmt.Sprintf("%d", d),
|
||||
fmt.Sprintf("%.1f %%", p),
|
||||
})
|
||||
// fmt.Sprintf("%d", e),
|
||||
// fmt.Sprintf("%d", d),
|
||||
// fmt.Sprintf("%.1f %%", an[area].CoveragePercent()),
|
||||
e, d, an[area].CoveragePercent(),
|
||||
)
|
||||
te += e
|
||||
td += d
|
||||
}
|
||||
|
||||
table.Append([]string{"----------------", "----------------", "-----------------", "---------"})
|
||||
|
||||
p := (float64(te) / float64(td)) * 100
|
||||
table.Append([]string{
|
||||
_ = table.AddRow(strings.Repeat("-", 20), strings.Repeat("-", 20), strings.Repeat("-", 20), strings.Repeat("-", 10))
|
||||
p := (float64(te) / (float64(te) + float64(td))) * 100
|
||||
_ = table.AddRow(
|
||||
"Total",
|
||||
fmt.Sprintf("%d", te),
|
||||
fmt.Sprintf("%d", td),
|
||||
fmt.Sprintf("%.1f %%", p),
|
||||
})
|
||||
table.Render()
|
||||
// fmt.Sprintf("%d", te),
|
||||
// fmt.Sprintf("%d", td),
|
||||
// fmt.Sprintf("%.1f %%", p),
|
||||
te, td, p,
|
||||
)
|
||||
fmt.Println(table.String())
|
||||
|
||||
// table = output.NewTable("Areas", "Enabled EndPoints", "Disabled EndPoints", "Coverage %")
|
||||
// table.SetTabular()
|
||||
// te = 0
|
||||
// td = 0
|
||||
// for _, area := range an.SortAreas() {
|
||||
// e := an[area].CountEnabled()
|
||||
// d := an[area].CountDisabled()
|
||||
//
|
||||
// _ = table.AddRow(
|
||||
// string(area),
|
||||
// // fmt.Sprintf("%d", e),
|
||||
// // fmt.Sprintf("%d", d),
|
||||
// // fmt.Sprintf("%.1f %%", an[area].CoveragePercent()),
|
||||
// e, d, an[area].CoveragePercent(),
|
||||
// )
|
||||
// te += e
|
||||
// td += d
|
||||
// }
|
||||
// _ = table.AddRow(strings.Repeat("-", 20), strings.Repeat("-", 20), strings.Repeat("-", 20), strings.Repeat("-", 10))
|
||||
// p = (float64(te) / (float64(te) + float64(td))) * 100
|
||||
// _ = table.AddRow(
|
||||
// "Total",
|
||||
// // fmt.Sprintf("%d", te),
|
||||
// // fmt.Sprintf("%d", td),
|
||||
// // fmt.Sprintf("%.1f %%", p),
|
||||
// te, td, p,
|
||||
// )
|
||||
// fmt.Println(table.String())
|
||||
|
||||
// table = output.NewTable("Areas", "Enabled EndPoints", "Disabled EndPoints", "Coverage %")
|
||||
// table.SetTableWriter()
|
||||
// te = 0
|
||||
// td = 0
|
||||
// for _, area := range an.SortAreas() {
|
||||
// e := an[area].CountEnabled()
|
||||
// d := an[area].CountDisabled()
|
||||
//
|
||||
// _ = table.AddRow(
|
||||
// string(area),
|
||||
// // fmt.Sprintf("%d", e),
|
||||
// // fmt.Sprintf("%d", d),
|
||||
// // fmt.Sprintf("%.1f %%", an[area].CoveragePercent()),
|
||||
// e, d, an[area].CoveragePercent(),
|
||||
// )
|
||||
// te += e
|
||||
// td += d
|
||||
// }
|
||||
// _ = table.AddRow(strings.Repeat("-", 20), strings.Repeat("-", 20), strings.Repeat("-", 20), strings.Repeat("-", 10))
|
||||
// p = (float64(te) / (float64(te) + float64(td))) * 100
|
||||
// _ = table.AddRow(
|
||||
// "Total",
|
||||
// // fmt.Sprintf("%d", te),
|
||||
// // fmt.Sprintf("%d", td),
|
||||
// // fmt.Sprintf("%.1f %%", p),
|
||||
// te, td, p,
|
||||
// )
|
||||
// fmt.Println(table.String())
|
||||
// fmt.Println()
|
||||
|
||||
// table := tablewriter.NewWriter(os.Stdout)
|
||||
// table.SetHeader([]string{"Areas", "Enabled EndPoints", "Disabled EndPoints", "Coverage %"})
|
||||
// table.SetBorder(true)
|
||||
// te := 0
|
||||
// td := 0
|
||||
// for _, area := range an.SortAreas() {
|
||||
// e := an[area].CountEnabled()
|
||||
// d := an[area].CountDisabled()
|
||||
// p := (float64(e) / float64(d)) * 100
|
||||
// table.Append([]string{
|
||||
// string(area),
|
||||
// fmt.Sprintf("%d", e),
|
||||
// fmt.Sprintf("%d", d),
|
||||
// fmt.Sprintf("%.1f %%", p),
|
||||
// })
|
||||
// te += e
|
||||
// td += d
|
||||
// }
|
||||
//
|
||||
// table.Append([]string{"----------------", "----------------", "-----------------", "---------"})
|
||||
//
|
||||
// p := (float64(te) / float64(td)) * 100
|
||||
// table.Append([]string{
|
||||
// "Total",
|
||||
// fmt.Sprintf("%d", te),
|
||||
// fmt.Sprintf("%d", td),
|
||||
// fmt.Sprintf("%.1f %%", p),
|
||||
// })
|
||||
// table.Render()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -8,7 +8,6 @@ import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"github.com/MickMake/GoUnify/Only"
|
||||
datatable "go.pennock.tech/tabular/auto"
|
||||
"os"
|
||||
"sort"
|
||||
"strings"
|
||||
@ -382,7 +381,15 @@ func (dm *DataMap) CreateDataTables() Tables {
|
||||
for name := range dm.StructMap.TableMap {
|
||||
var ret GoStruct.StructTable
|
||||
dm.Error = ret.Process(dm.EndPoint.GetArea().String(), name, dm.StructMap.TableMap[name])
|
||||
if dm.Error != nil {
|
||||
break
|
||||
}
|
||||
|
||||
_, dm.Error = ret.CreateTable()
|
||||
if dm.Error != nil {
|
||||
break
|
||||
}
|
||||
|
||||
tables[name] = &ret
|
||||
}
|
||||
}
|
||||
@ -405,7 +412,7 @@ func (dm *DataMap) CreateResultTable(full bool) output.Table {
|
||||
"Update Freq",
|
||||
)
|
||||
|
||||
for _, p := range dm.Sort() {
|
||||
for p := range dm.Map {
|
||||
entries := dm.Map[p].Entries
|
||||
for _, de := range entries {
|
||||
if full {
|
||||
@ -473,10 +480,10 @@ func (dm *DataMap) CreateResultTable(full bool) output.Table {
|
||||
return table
|
||||
}
|
||||
|
||||
func (dm *DataMap) Print() {
|
||||
func (dm DataMap) String() string {
|
||||
var ret string
|
||||
for range Only.Once {
|
||||
table := datatable.New("utf8-heavy")
|
||||
table.AddHeaders(
|
||||
table := output.NewTable(
|
||||
"Index",
|
||||
"EndPoint",
|
||||
|
||||
@ -500,7 +507,7 @@ func (dm *DataMap) Print() {
|
||||
for _, v := range dm.Map[k].Entries {
|
||||
i++
|
||||
|
||||
table.AddRowItems(
|
||||
dm.Error = table.AddRow(
|
||||
i,
|
||||
v.EndPoint,
|
||||
|
||||
@ -517,12 +524,62 @@ func (dm *DataMap) Print() {
|
||||
v.Point.Parents.Types(),
|
||||
v.Point.Parents.Codes(),
|
||||
)
|
||||
if dm.Error != nil {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ret, _ := table.Render()
|
||||
fmt.Println(ret)
|
||||
ret = table.String()
|
||||
|
||||
// table := datatable.New("utf8-heavy")
|
||||
// table.AddHeaders(
|
||||
// "Index",
|
||||
// "EndPoint",
|
||||
//
|
||||
// "Id",
|
||||
// "Name",
|
||||
// "Unit",
|
||||
// "Type",
|
||||
// "Value",
|
||||
// "(Value)",
|
||||
// "Valid",
|
||||
//
|
||||
// "GroupName",
|
||||
// "Parent Ids",
|
||||
// "Parent Types",
|
||||
// "Parent Codes",
|
||||
// )
|
||||
// // dm.Order - Produces double the amount of entries for some reason.
|
||||
// i := 0
|
||||
// for k := range dm.Map {
|
||||
// for _, v := range dm.Map[k].Entries {
|
||||
// i++
|
||||
//
|
||||
// table.AddRowItems(
|
||||
// i,
|
||||
// v.EndPoint,
|
||||
//
|
||||
// v.Point.Id,
|
||||
// v.Point.Description,
|
||||
// v.Point.Unit,
|
||||
// v.Point.UpdateFreq,
|
||||
// v.Value,
|
||||
// v.Current.Value.First(),
|
||||
// v.Point.Valid,
|
||||
//
|
||||
// v.Point.GroupName,
|
||||
// v.Point.Parents.PsIds(),
|
||||
// v.Point.Parents.Types(),
|
||||
// v.Point.Parents.Codes(),
|
||||
// )
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// ret, _ := table.Render()
|
||||
// fmt.Println(ret)
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
func (dm *DataMap) Sort() []string {
|
||||
|
@ -301,14 +301,6 @@ func (sgd *SunGrowData) GetData() error {
|
||||
// break
|
||||
// }
|
||||
}
|
||||
if sgd.Error != nil {
|
||||
break
|
||||
}
|
||||
|
||||
sgd.Error = sgd.Process()
|
||||
if sgd.Error != nil {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return sgd.Error
|
||||
@ -492,7 +484,7 @@ func (sgd *SunGrowDataResult) Sort() []string {
|
||||
}
|
||||
|
||||
func (sgd *SunGrowDataResult) Print() {
|
||||
sgd.Response.Data.Print()
|
||||
fmt.Println(sgd.Response.Data.String())
|
||||
}
|
||||
|
||||
|
||||
@ -574,112 +566,15 @@ func (sgd *SunGrowDataResponse) Output() error {
|
||||
func (sgd *SunGrowDataResponse) OutputDataTables() error {
|
||||
for range Only.Once {
|
||||
tables := sgd.Data.CreateDataTables()
|
||||
if sgd.Data.Error != nil {
|
||||
sgd.Error = sgd.Data.Error
|
||||
break
|
||||
}
|
||||
if len(tables) == 0 {
|
||||
break
|
||||
}
|
||||
|
||||
// @iSolarCloud/api/struct_data.go:420
|
||||
// if sgd.Options.OutputType.IsGraph() {
|
||||
// for _, data := range tables {
|
||||
// if !data.IsValid {
|
||||
// fmt.Printf("# %s.%s - has no graphable data.\n", data.Area, data.Name)
|
||||
// continue
|
||||
// }
|
||||
//
|
||||
// if sgd.Options.TitleSuffix == "" {
|
||||
// sgd.Options.TitleSuffix = data.Table.GetTitle()
|
||||
// }
|
||||
// data.Table.OutputType = sgd.Options.OutputType
|
||||
// data.Table.SetSaveFile(true) // sgd.Options.SaveAsFile
|
||||
// data.Table.AppendTitle(" - %s", sgd.Options.TitleSuffix)
|
||||
// data.Table.AppendFilePrefix(sgd.Options.FileSuffix)
|
||||
//
|
||||
// if sgd.Options.GraphRequest.TimeColumn == nil {
|
||||
// for _, col := range data.Table.GetHeaders() {
|
||||
// val := data.Values.GetCell(0, col)
|
||||
// if val.IsTypeDateTime() {
|
||||
// sgd.Options.GraphRequest.TimeColumn = &col
|
||||
// break
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// if sgd.Options.GraphRequest.TimeColumn == nil {
|
||||
// // No time column - abort.
|
||||
// break
|
||||
// }
|
||||
//
|
||||
// if sgd.Options.GraphRequest.UnitsColumn != nil {
|
||||
// for _, col := range data.Table.GetHeaders() {
|
||||
// if *sgd.Options.GraphRequest.UnitsColumn != col {
|
||||
// continue
|
||||
// }
|
||||
// val := data.Values.GetCell(0, col)
|
||||
// unit := val.Unit()
|
||||
// if unit != "" {
|
||||
// continue
|
||||
// }
|
||||
// sgd.Options.GraphRequest.UnitsColumn = &col
|
||||
// sgd.Options.GraphRequest.DataUnit = &unit
|
||||
// break
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// if sgd.Options.GraphRequest.NameColumn == nil {
|
||||
// }
|
||||
//
|
||||
// // if sgd.Options.GraphRequest.Width == nil {
|
||||
// // }
|
||||
//
|
||||
// // if sgd.Options.GraphRequest.Height == nil {
|
||||
// // }
|
||||
//
|
||||
// var values []string
|
||||
// if sgd.Options.GraphRequest.DataColumn == nil {
|
||||
// fmt.Println("Finding points to graph...")
|
||||
// fmt.Printf("Table Headers: %s\n", strings.Join(data.Table.GetHeaders(), ", "))
|
||||
// fmt.Printf("Table rows: %d\n", data.Rows)
|
||||
// // We don't have any DataColumn defined - find them.
|
||||
// for _, col := range data.Table.GetHeaders() {
|
||||
// val := data.Values.GetCell(0, col)
|
||||
// if val.IsNumber() {
|
||||
// values = append(values, col)
|
||||
// }
|
||||
// }
|
||||
// fmt.Printf("Found %d points:\n", len(values))
|
||||
// }
|
||||
//
|
||||
// // title := data.Table.GetTitle()
|
||||
// // file := data.Table.GetFilePrefix()
|
||||
// var title string
|
||||
// var file string
|
||||
// if sgd.Options.PrimaryKey == "" {
|
||||
// title = fmt.Sprintf("%s.%s", data.Area, data.Name)
|
||||
// file = fmt.Sprintf("%s.%ss", data.Area, data.Name)
|
||||
// } else {
|
||||
// title = fmt.Sprintf("%s.%s - %s", data.Area, data.Name, sgd.Options.PrimaryKey)
|
||||
// file = fmt.Sprintf("%s.%s-%s", data.Area, data.Name, sgd.Options.PrimaryKey)
|
||||
// }
|
||||
//
|
||||
// for _, value := range values {
|
||||
// sgd.Options.GraphRequest.DataColumn = &value
|
||||
// data.Table.SetTitle("%s - %s", title, value)
|
||||
// sgd.Options.GraphRequest.Title = data.Table.GetTitle()
|
||||
// data.Table.SetFilePrefix("%s-%s", file, value)
|
||||
//
|
||||
// sgd.Error = data.Table.SetGraph(sgd.Options.GraphRequest)
|
||||
// if sgd.Error != nil {
|
||||
// break
|
||||
// }
|
||||
//
|
||||
// sgd.Error = data.Table.Output()
|
||||
// if sgd.Error != nil {
|
||||
// break
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// break
|
||||
// }
|
||||
|
||||
for _, data := range tables {
|
||||
if sgd.Options.TitleSuffix == "" {
|
||||
sgd.Options.TitleSuffix = data.Table.GetTitle()
|
||||
@ -807,5 +702,5 @@ func (sgd *SunGrowDataResponse) LookUpPointId() {
|
||||
}
|
||||
|
||||
func (sgd *SunGrowDataResponse) Print() {
|
||||
sgd.Data.Print()
|
||||
fmt.Println(sgd.Data.String())
|
||||
}
|
||||
|
@ -56,6 +56,7 @@ type RequestArgs struct {
|
||||
DateId *valueTypes.DateTime `json:"date_id,omitempty"`
|
||||
DateType *string `json:"date_type,omitempty"`
|
||||
MonthDate *valueTypes.DateTime `json:"month_date,omitempty"`
|
||||
MonthDate2 *valueTypes.DateTime `json:"monthDate,omitempty"`
|
||||
Day *valueTypes.DateTime `json:"day,omitempty"`
|
||||
BeginTime1 *valueTypes.DateTime `json:"beginTime,omitempty"` // valueTypes.Time
|
||||
EndTime1 *valueTypes.DateTime `json:"endTime,omitempty"` // valueTypes.Time
|
||||
|
@ -5,11 +5,13 @@ import (
|
||||
"GoSungrow/iSolarCloud/AppService/getDeviceModelInfoList"
|
||||
"GoSungrow/iSolarCloud/AppService/getPowerDevicePointNames"
|
||||
"GoSungrow/iSolarCloud/AppService/queryDeviceList"
|
||||
"GoSungrow/iSolarCloud/api/GoStruct/output"
|
||||
"GoSungrow/iSolarCloud/api/GoStruct/valueTypes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/MickMake/GoUnify/Only"
|
||||
datatable "go.pennock.tech/tabular/auto"
|
||||
"os"
|
||||
"sort"
|
||||
"time"
|
||||
)
|
||||
@ -28,7 +30,6 @@ func (sg *SunGrow) DeviceTypeList(psIds ...string) (string, error) {
|
||||
break
|
||||
}
|
||||
|
||||
|
||||
// data := sg.NewSunGrowData()
|
||||
// data.SetEndpoints(queryDeviceList.EndPointName)
|
||||
// data.SetArgs(
|
||||
@ -44,7 +45,6 @@ func (sg *SunGrow) DeviceTypeList(psIds ...string) (string, error) {
|
||||
// break
|
||||
// }
|
||||
|
||||
|
||||
table := datatable.New("utf8-heavy")
|
||||
table.AddHeaders("Device Type", "Name")
|
||||
|
||||
@ -85,15 +85,28 @@ func (sg *SunGrow) DeviceTypePoints(deviceTypes ...string) (string, error) {
|
||||
var ret string
|
||||
|
||||
for range Only.Once {
|
||||
if len(deviceTypes) == 0 {
|
||||
pids := sg.SetPsIds()
|
||||
if sg.Error != nil {
|
||||
break
|
||||
}
|
||||
if len(pids) == 0 {
|
||||
break
|
||||
}
|
||||
|
||||
table := datatable.New("utf8-heavy")
|
||||
table.AddHeaders("Device Type", "Point Id", "Name", "Cal Type")
|
||||
ep1 := sg.GetByStruct(queryDeviceList.EndPointName,
|
||||
queryDeviceList.RequestData{ PsId: pids[0] },
|
||||
DefaultCacheTimeout,
|
||||
)
|
||||
if sg.IsError() {
|
||||
break
|
||||
}
|
||||
data1 := queryDeviceList.Assert(ep1)
|
||||
|
||||
var points []getPowerDevicePointNames.Point
|
||||
for _, deviceType := range deviceTypes {
|
||||
|
||||
table := output.NewTable("Point Id", "Name", "Cal Type", "Device Type", "Device Name")
|
||||
|
||||
// var points []getPowerDevicePointNames.Point
|
||||
for deviceType, deviceName := range data1.Response.ResultData.DevTypeDefinition {
|
||||
ep := sg.GetByStruct(getPowerDevicePointNames.EndPointName,
|
||||
getPowerDevicePointNames.RequestData{ DeviceType: valueTypes.SetIntegerString(deviceType) },
|
||||
DefaultCacheTimeout,
|
||||
@ -102,36 +115,41 @@ func (sg *SunGrow) DeviceTypePoints(deviceTypes ...string) (string, error) {
|
||||
break
|
||||
}
|
||||
data := getPowerDevicePointNames.Assert(ep)
|
||||
points = append(points, data.Response.ResultData...)
|
||||
// points = append(points, data.Response.ResultData...)
|
||||
|
||||
// Sort table based on PointId
|
||||
pn := map[string]int{}
|
||||
for index, point := range points {
|
||||
pn[point.PointId.String()] = index
|
||||
}
|
||||
var names []string
|
||||
for point := range pn {
|
||||
names = append(names, point)
|
||||
}
|
||||
sort.Strings(names)
|
||||
// pn := map[string]int{}
|
||||
// for index, point := range points {
|
||||
// pn[point.PointId.String()] = index
|
||||
// }
|
||||
// var names []string
|
||||
// for point := range pn {
|
||||
// names = append(names, point)
|
||||
// }
|
||||
// sort.Strings(names)
|
||||
|
||||
for _, name := range names {
|
||||
index := pn[name]
|
||||
point := points[index]
|
||||
table.AddRowItems(deviceType, point.PointId, point.PointName, point.PointCalType)
|
||||
for name := range data.Response.ResultData {
|
||||
point := data.Response.ResultData[name]
|
||||
// point := points[index]
|
||||
sg.Error = table.AddRow(point.PointId.Value(), point.PointName.String(), point.PointCalType.String(), deviceType, deviceName.String())
|
||||
if sg.IsError() {
|
||||
break
|
||||
}
|
||||
}
|
||||
_, _ = fmt.Fprintf(os.Stderr, ".")
|
||||
time.Sleep(time.Millisecond * 200)
|
||||
}
|
||||
_, _ = fmt.Fprintf(os.Stderr, "\n")
|
||||
if sg.IsError() {
|
||||
break
|
||||
}
|
||||
|
||||
var r string
|
||||
r, sg.Error = table.Render()
|
||||
ret = fmt.Sprintln("# Available points:")
|
||||
table.Sort("Point Id")
|
||||
ret += table.String()
|
||||
if sg.Error != nil {
|
||||
break
|
||||
}
|
||||
ret += fmt.Sprintln("# Available points:")
|
||||
ret += r
|
||||
}
|
||||
|
||||
return ret, sg.Error
|
||||
|
@ -1,2 +1,27 @@
|
||||
package iSolarCloud
|
||||
|
||||
import (
|
||||
"GoSungrow/iSolarCloud/AppService/queryUnitList"
|
||||
"github.com/MickMake/GoUnify/Only"
|
||||
)
|
||||
|
||||
|
||||
func (sg *SunGrow) MetaUnitList() error {
|
||||
for range Only.Once {
|
||||
data := sg.NewSunGrowData()
|
||||
data.SetArgs()
|
||||
data.SetEndpoints(queryUnitList.EndPointName)
|
||||
|
||||
sg.Error = data.GetData()
|
||||
if sg.Error != nil {
|
||||
break
|
||||
}
|
||||
|
||||
sg.Error = data.OutputDataTables()
|
||||
if sg.Error != nil {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return sg.Error
|
||||
}
|
||||
|
@ -250,7 +250,7 @@ func (sg *SunGrow) GetAllPointsData(psIds ...string) error {
|
||||
|
||||
}
|
||||
|
||||
points.Print()
|
||||
fmt.Println(points.String())
|
||||
}
|
||||
|
||||
return sg.Error
|
||||
|
@ -302,9 +302,11 @@ func (sg *SunGrow) GetDevices() []queryDeviceListForBackSys.Device {
|
||||
func (sg *SunGrow) SetPsIds(args ...string) valueTypes.PsIds {
|
||||
var pids valueTypes.PsIds
|
||||
for range Only.Once {
|
||||
pids = valueTypes.SetPsIdStrings(args)
|
||||
if len(pids) > 0 {
|
||||
break
|
||||
if len(args) > 0 {
|
||||
pids = valueTypes.SetPsIdStrings(args)
|
||||
if len(pids) > 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
pids, sg.Error = sg.GetPsIds()
|
||||
|
11
tablib/.travis.yml
Normal file
11
tablib/.travis.yml
Normal file
@ -0,0 +1,11 @@
|
||||
language: go
|
||||
|
||||
go:
|
||||
- 1.3
|
||||
install:
|
||||
- go get github.com/bndr/gotabulate
|
||||
- go get github.com/agrison/mxj
|
||||
- go get github.com/tealeg/xlsx
|
||||
- go get gopkg.in/yaml.v2
|
||||
- go get gopkg.in/check.v1
|
||||
- go get -u github.com/agrison/go-tablib
|
39
tablib/HISTORY.md
Normal file
39
tablib/HISTORY.md
Normal file
@ -0,0 +1,39 @@
|
||||
## History
|
||||
|
||||
### 2016-02-26
|
||||
- Added support for Markdown tables export
|
||||
|
||||
### 2016-02-25
|
||||
|
||||
- Constrained columns
|
||||
- `Dataset.ValidSubset()`
|
||||
- `Dataset.InvalidSubset()`
|
||||
- Tagging a specific row after it was already created
|
||||
- Loading Databooks
|
||||
- JSON
|
||||
- YAML
|
||||
- Loading Datasets
|
||||
- CSV
|
||||
- TSV
|
||||
- XML
|
||||
- Unit test coverage
|
||||
|
||||
### 2016-02-24
|
||||
|
||||
- Constrained columns
|
||||
- Support for `time.Time` in `Dataset.MySQL()` and `Dataset.Postgres()` export.
|
||||
- Source files refactoring
|
||||
- Added on travis-ci
|
||||
- Retrieving specific rows
|
||||
- `Dataset.Row(int)`
|
||||
- `Dataset.Rows(int...)`
|
||||
- `Dataset.Slice(int, int)`
|
||||
|
||||
### 2016-02-23
|
||||
|
||||
- First release with support for:
|
||||
- Loading YAML, JSON
|
||||
- Exporting YAML, JSON, CSV, TSV, XLS, XML, ASCII
|
||||
- Filtering + Tagging
|
||||
- Sorting
|
||||
- ...
|
21
tablib/LICENSE
Normal file
21
tablib/LICENSE
Normal file
@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2016 Alexandre Grison
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
602
tablib/README.md
Normal file
602
tablib/README.md
Normal file
@ -0,0 +1,602 @@
|
||||
# go-tablib: format-agnostic tabular dataset library
|
||||
|
||||
[][license]
|
||||
[][godocs]
|
||||
[][goreportcard]
|
||||
[](https://travis-ci.org/agrison/go-tablib)
|
||||
|
||||
[license]: https://github.com/agrison/go-tablib/blob/master/LICENSE
|
||||
[godocs]: https://godoc.org/github.com/agrison/go-tablib
|
||||
[goreportcard]: https://goreportcard.com/report/github.com/agrison/go-tablib
|
||||
|
||||
Go-Tablib is a format-agnostic tabular dataset library, written in Go.
|
||||
This is a port of the famous Python's [tablib](https://github.com/kennethreitz/tablib) by Kenneth Reitz with some new features.
|
||||
|
||||
Export formats supported:
|
||||
|
||||
* JSON (Sets + Books)
|
||||
* YAML (Sets + Books)
|
||||
* XLSX (Sets + Books)
|
||||
* XML (Sets + Books)
|
||||
* TSV (Sets)
|
||||
* CSV (Sets)
|
||||
* ASCII + Markdown (Sets)
|
||||
* MySQL (Sets)
|
||||
* Postgres (Sets)
|
||||
|
||||
Loading formats supported:
|
||||
|
||||
* JSON (Sets + Books)
|
||||
* YAML (Sets + Books)
|
||||
* XML (Sets)
|
||||
* CSV (Sets)
|
||||
* TSV (Sets)
|
||||
|
||||
|
||||
## Overview
|
||||
|
||||
### tablib.Dataset
|
||||
A Dataset is a table of tabular data. It must have a header row. Datasets can be exported to JSON, YAML, CSV, TSV, and XML. They can be filtered, sorted and validated against constraint on columns.
|
||||
|
||||
### tablib.Databook
|
||||
A Databook is a set of Datasets. The most common form of a Databook is an Excel file with multiple spreadsheets. Databooks can be exported to JSON, YAML and XML.
|
||||
|
||||
### tablib.Exportable
|
||||
An exportable is a struct that holds a buffer representing the Databook or Dataset after it has been formated to any of the supported export formats.
|
||||
At this point the Datbook or Dataset cannot be modified anymore, but it can be returned as a `string`, a `[]byte` or written to a `io.Writer` or a file.
|
||||
|
||||
## Usage
|
||||
|
||||
Creates a dataset and populate it:
|
||||
|
||||
```go
|
||||
ds := NewDataset([]string{"firstName", "lastName"})
|
||||
```
|
||||
|
||||
Add new rows:
|
||||
```go
|
||||
ds.Append([]interface{}{"John", "Adams"})
|
||||
ds.AppendValues("George", "Washington")
|
||||
```
|
||||
|
||||
Add new columns:
|
||||
```go
|
||||
ds.AppendColumn("age", []interface{}{90, 67})
|
||||
ds.AppendColumnValues("sex", "male", "male")
|
||||
```
|
||||
|
||||
Add a dynamic column, by passing a function which has access to the current row, and must
|
||||
return a value:
|
||||
```go
|
||||
func lastNameLen(row []interface{}) interface{} {
|
||||
return len(row[1].(string))
|
||||
}
|
||||
ds.AppendDynamicColumn("lastName length", lastNameLen)
|
||||
ds.CSV()
|
||||
// >>
|
||||
// firstName, lastName, age, sex, lastName length
|
||||
// John, Adams, 90, male, 5
|
||||
// George, Washington, 67, male, 10
|
||||
```
|
||||
|
||||
Delete rows:
|
||||
```go
|
||||
ds.DeleteRow(1) // starts at 0
|
||||
```
|
||||
|
||||
Delete columns:
|
||||
```go
|
||||
ds.DeleteColumn("sex")
|
||||
```
|
||||
|
||||
Get a row or multiple rows:
|
||||
```go
|
||||
row, _ := ds.Row(0)
|
||||
fmt.Println(row["firstName"]) // George
|
||||
|
||||
rows, _ := ds.Rows(0, 1)
|
||||
fmt.Println(rows[0]["firstName"]) // George
|
||||
fmt.Println(rows[1]["firstName"]) // Thomas
|
||||
```
|
||||
|
||||
Slice a Dataset:
|
||||
```go
|
||||
newDs, _ := ds.Slice(1, 5) // returns a fresh Dataset with rows [1..5[
|
||||
```
|
||||
|
||||
|
||||
## Filtering
|
||||
|
||||
You can add **tags** to rows by using a specific `Dataset` method. This allows you to filter your `Dataset` later. This can be useful to separate rows of data based on arbitrary criteria (e.g. origin) that you don’t want to include in your `Dataset`.
|
||||
```go
|
||||
ds := NewDataset([]string{"Maker", "Model"})
|
||||
ds.AppendTagged([]interface{}{"Porsche", "911"}, "fast", "luxury")
|
||||
ds.AppendTagged([]interface{}{"Skoda", "Octavia"}, "family")
|
||||
ds.AppendTagged([]interface{}{"Ferrari", "458"}, "fast", "luxury")
|
||||
ds.AppendValues("Citroen", "Picasso")
|
||||
ds.AppendValues("Bentley", "Continental")
|
||||
ds.Tag(4, "luxury") // Bentley
|
||||
ds.AppendValuesTagged("Aston Martin", "DB9", /* these are tags */ "fast", "luxury")
|
||||
```
|
||||
|
||||
Filtering the `Dataset` is possible by calling `Filter(column)`:
|
||||
```go
|
||||
luxuryCars, err := ds.Filter("luxury").CSV()
|
||||
fmt.Println(luxuryCars)
|
||||
// >>>
|
||||
// Maker,Model
|
||||
// Porsche,911
|
||||
// Ferrari,458
|
||||
// Bentley,Continental
|
||||
// Aston Martin,DB9
|
||||
```
|
||||
|
||||
```go
|
||||
fastCars, err := ds.Filter("fast").CSV()
|
||||
fmt.Println(fastCars)
|
||||
// >>>
|
||||
// Maker,Model
|
||||
// Porsche,911
|
||||
// Ferrari,458
|
||||
// Aston Martin,DB9
|
||||
```
|
||||
|
||||
Tags at a specific row can be retrieved by calling `Dataset.Tags(index int)`
|
||||
|
||||
## Sorting
|
||||
|
||||
Datasets can be sorted by a specific column.
|
||||
```go
|
||||
ds := NewDataset([]string{"Maker", "Model", "Year"})
|
||||
ds.AppendValues("Porsche", "991", 2012)
|
||||
ds.AppendValues("Skoda", "Octavia", 2011)
|
||||
ds.AppendValues("Ferrari", "458", 2009)
|
||||
ds.AppendValues("Citroen", "Picasso II", 2013)
|
||||
ds.AppendValues("Bentley", "Continental GT", 2003)
|
||||
|
||||
sorted, err := ds.Sort("Year").CSV()
|
||||
fmt.Println(sorted)
|
||||
// >>
|
||||
// Maker, Model, Year
|
||||
// Bentley, Continental GT, 2003
|
||||
// Ferrari, 458, 2009
|
||||
// Skoda, Octavia, 2011
|
||||
// Porsche, 991, 2012
|
||||
// Citroen, Picasso II, 2013
|
||||
```
|
||||
|
||||
## Constraining
|
||||
|
||||
Datasets can have columns constrained by functions and further checked if valid.
|
||||
```go
|
||||
ds := NewDataset([]string{"Maker", "Model", "Year"})
|
||||
ds.AppendValues("Porsche", "991", 2012)
|
||||
ds.AppendValues("Skoda", "Octavia", 2011)
|
||||
ds.AppendValues("Ferrari", "458", 2009)
|
||||
ds.AppendValues("Citroen", "Picasso II", 2013)
|
||||
ds.AppendValues("Bentley", "Continental GT", 2003)
|
||||
|
||||
ds.ConstrainColumn("Year", func(val interface{}) bool { return val.(int) > 2008 })
|
||||
ds.ValidFailFast() // false
|
||||
if !ds.Valid() { // validate the whole dataset, errors are retrieved in Dataset.ValidationErrors
|
||||
ds.ValidationErrors[0] // Row: 4, Column: 2
|
||||
}
|
||||
```
|
||||
|
||||
A Dataset with constrained columns can be filtered to keep only the rows satisfying the constraints.
|
||||
```go
|
||||
valid := ds.ValidSubset().Tabular("simple") // Cars after 2008
|
||||
fmt.Println(valid)
|
||||
```
|
||||
|
||||
Will output:
|
||||
```
|
||||
------------ --------------- ---------
|
||||
Maker Model Year
|
||||
------------ --------------- ---------
|
||||
Porsche 991 2012
|
||||
|
||||
Skoda Octavia 2011
|
||||
|
||||
Ferrari 458 2009
|
||||
|
||||
Citroen Picasso II 2013
|
||||
------------ --------------- ---------
|
||||
```
|
||||
|
||||
```go
|
||||
invalid := ds.InvalidSubset().Tabular("simple") // Cars before 2008
|
||||
fmt.Println(invalid)
|
||||
```
|
||||
|
||||
Will output:
|
||||
```
|
||||
------------ ------------------- ---------
|
||||
Maker Model Year
|
||||
------------ ------------------- ---------
|
||||
Bentley Continental GT 2003
|
||||
------------ ------------------- ---------
|
||||
```
|
||||
|
||||
## Loading
|
||||
|
||||
### JSON
|
||||
```go
|
||||
ds, _ := LoadJSON([]byte(`[
|
||||
{"age":90,"firstName":"John","lastName":"Adams"},
|
||||
{"age":67,"firstName":"George","lastName":"Washington"},
|
||||
{"age":83,"firstName":"Henry","lastName":"Ford"}
|
||||
]`))
|
||||
```
|
||||
|
||||
### YAML
|
||||
```go
|
||||
ds, _ := LoadYAML([]byte(`- age: 90
|
||||
firstName: John
|
||||
lastName: Adams
|
||||
- age: 67
|
||||
firstName: George
|
||||
lastName: Washington
|
||||
- age: 83
|
||||
firstName: Henry
|
||||
lastName: Ford`))
|
||||
```
|
||||
|
||||
## Exports
|
||||
|
||||
### Exportable
|
||||
|
||||
Any of the following export format returns an `*Exportable` which means you can use:
|
||||
- `Bytes()` to get the content as a byte array
|
||||
- `String()` to get the content as a string
|
||||
- `WriteTo(io.Writer)` to write the content to an `io.Writer`
|
||||
- `WriteFile(filename string, perm os.FileMode)` to write to a file
|
||||
|
||||
It avoids unnecessary conversion between `string` and `[]byte` to output/write/whatever.
|
||||
Thanks to [@figlief](https://github.com/figlief) for the proposition.
|
||||
|
||||
### JSON
|
||||
```go
|
||||
json, _ := ds.JSON()
|
||||
fmt.Println(json)
|
||||
```
|
||||
|
||||
Will output:
|
||||
```json
|
||||
[{"age":90,"firstName":"John","lastName":"Adams"},{"age":67,"firstName":"George","lastName":"Washington"},{"age":83,"firstName":"Henry","lastName":"Ford"}]
|
||||
```
|
||||
|
||||
### XML
|
||||
```go
|
||||
xml, _ := ds.XML()
|
||||
fmt.Println(xml)
|
||||
```
|
||||
|
||||
Will ouput:
|
||||
```xml
|
||||
<dataset>
|
||||
<row>
|
||||
<age>90</age>
|
||||
<firstName>John</firstName>
|
||||
<lastName>Adams</lastName>
|
||||
</row> <row>
|
||||
<age>67</age>
|
||||
<firstName>George</firstName>
|
||||
<lastName>Washington</lastName>
|
||||
</row> <row>
|
||||
<age>83</age>
|
||||
<firstName>Henry</firstName>
|
||||
<lastName>Ford</lastName>
|
||||
</row>
|
||||
</dataset>
|
||||
```
|
||||
|
||||
### CSV
|
||||
```go
|
||||
csv, _ := ds.CSV()
|
||||
fmt.Println(csv)
|
||||
```
|
||||
|
||||
Will ouput:
|
||||
```csv
|
||||
firstName,lastName,age
|
||||
John,Adams,90
|
||||
George,Washington,67
|
||||
Henry,Ford,83
|
||||
```
|
||||
|
||||
### TSV
|
||||
```go
|
||||
tsv, _ := ds.TSV()
|
||||
fmt.Println(tsv)
|
||||
```
|
||||
|
||||
Will ouput:
|
||||
```tsv
|
||||
firstName lastName age
|
||||
John Adams 90
|
||||
George Washington 67
|
||||
Henry Ford 83
|
||||
```
|
||||
|
||||
### YAML
|
||||
```go
|
||||
yaml, _ := ds.YAML()
|
||||
fmt.Println(yaml)
|
||||
```
|
||||
|
||||
Will ouput:
|
||||
```yaml
|
||||
- age: 90
|
||||
firstName: John
|
||||
lastName: Adams
|
||||
- age: 67
|
||||
firstName: George
|
||||
lastName: Washington
|
||||
- age: 83
|
||||
firstName: Henry
|
||||
lastName: Ford
|
||||
```
|
||||
|
||||
### HTML
|
||||
```go
|
||||
html := ds.HTML()
|
||||
fmt.Println(html)
|
||||
```
|
||||
|
||||
Will output:
|
||||
```html
|
||||
<table class="table table-striped">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>firstName</th>
|
||||
<th>lastName</th>
|
||||
<th>age</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>George</td>
|
||||
<td>Washington</td>
|
||||
<td>90</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Henry</td>
|
||||
<td>Ford</td>
|
||||
<td>67</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Foo</td>
|
||||
<td>Bar</td>
|
||||
<td>83</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
```
|
||||
|
||||
### XLSX
|
||||
```go
|
||||
xlsx, _ := ds.XLSX()
|
||||
fmt.Println(xlsx)
|
||||
// >>>
|
||||
// binary content
|
||||
xlsx.WriteTo(...)
|
||||
```
|
||||
|
||||
### ASCII
|
||||
|
||||
#### Grid format
|
||||
```go
|
||||
ascii := ds.Tabular("grid" /* tablib.TabularGrid */)
|
||||
fmt.Println(ascii)
|
||||
```
|
||||
|
||||
Will output:
|
||||
```
|
||||
+--------------+---------------+--------+
|
||||
| firstName | lastName | age |
|
||||
+==============+===============+========+
|
||||
| George | Washington | 90 |
|
||||
+--------------+---------------+--------+
|
||||
| Henry | Ford | 67 |
|
||||
+--------------+---------------+--------+
|
||||
| Foo | Bar | 83 |
|
||||
+--------------+---------------+--------+
|
||||
```
|
||||
|
||||
#### Simple format
|
||||
```go
|
||||
ascii := ds.Tabular("simple" /* tablib.TabularSimple */)
|
||||
fmt.Println(ascii)
|
||||
```
|
||||
|
||||
Will output:
|
||||
```
|
||||
-------------- --------------- --------
|
||||
firstName lastName age
|
||||
-------------- --------------- --------
|
||||
George Washington 90
|
||||
|
||||
Henry Ford 67
|
||||
|
||||
Foo Bar 83
|
||||
-------------- --------------- --------
|
||||
```
|
||||
|
||||
#### Condensed format
|
||||
```go
|
||||
ascii := ds.Tabular("condensed" /* tablib.TabularCondensed */)
|
||||
fmt.Println(ascii)
|
||||
```
|
||||
|
||||
Similar to simple but with less line feed:
|
||||
```
|
||||
-------------- --------------- --------
|
||||
firstName lastName age
|
||||
-------------- --------------- --------
|
||||
George Washington 90
|
||||
Henry Ford 67
|
||||
Foo Bar 83
|
||||
-------------- --------------- --------
|
||||
```
|
||||
|
||||
### Markdown
|
||||
|
||||
Markdown tables are similar to the Tabular condensed format, except that they have
|
||||
pipe characters separating columns.
|
||||
|
||||
```go
|
||||
mkd := ds.Markdown() // or
|
||||
mkd := ds.Tabular("markdown" /* tablib.TabularMarkdown */)
|
||||
fmt.Println(mkd)
|
||||
```
|
||||
|
||||
Will output:
|
||||
```
|
||||
| firstName | lastName | gpa |
|
||||
| -------------- | --------------- | ------- |
|
||||
| John | Adams | 90 |
|
||||
| George | Washington | 67 |
|
||||
| Thomas | Jefferson | 50 |
|
||||
```
|
||||
|
||||
Which equals to the following when rendered as HTML:
|
||||
|
||||
| firstName | lastName | gpa |
|
||||
| -------------- | --------------- | ------- |
|
||||
| John | Adams | 90 |
|
||||
| George | Washington | 67 |
|
||||
| Thomas | Jefferson | 50 |
|
||||
|
||||
### MySQL
|
||||
```go
|
||||
sql := ds.MySQL()
|
||||
fmt.Println(sql)
|
||||
```
|
||||
|
||||
Will output:
|
||||
```sql
|
||||
CREATE TABLE IF NOT EXISTS presidents
|
||||
(
|
||||
id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
|
||||
firstName VARCHAR(9),
|
||||
lastName VARCHAR(8),
|
||||
gpa DOUBLE
|
||||
);
|
||||
|
||||
INSERT INTO presidents VALUES(1, 'Jacques', 'Chirac', 88);
|
||||
INSERT INTO presidents VALUES(2, 'Nicolas', 'Sarkozy', 98);
|
||||
INSERT INTO presidents VALUES(3, 'François', 'Hollande', 34);
|
||||
|
||||
COMMIT;
|
||||
```
|
||||
|
||||
Numeric (`uint`, `int`, `float`, ...) are stored as `DOUBLE`, `string`s as `VARCHAR` with width set to the length of the longest string in the column, and `time.Time`s are stored as `TIMESTAMP`.
|
||||
|
||||
### Postgres
|
||||
```go
|
||||
sql := ds.Postgres()
|
||||
fmt.Println(sql)
|
||||
```
|
||||
|
||||
Will output:
|
||||
```sql
|
||||
CREATE TABLE IF NOT EXISTS presidents
|
||||
(
|
||||
id SERIAL PRIMARY KEY,
|
||||
firstName TEXT,
|
||||
lastName TEXT,
|
||||
gpa NUMERIC
|
||||
);
|
||||
|
||||
INSERT INTO presidents VALUES(1, 'Jacques', 'Chirac', 88);
|
||||
INSERT INTO presidents VALUES(2, 'Nicolas', 'Sarkozy', 98);
|
||||
INSERT INTO presidents VALUES(3, 'François', 'Hollande', 34);
|
||||
|
||||
COMMIT;
|
||||
```
|
||||
|
||||
Numerics (`uint`, `int`, `float`, ...) are stored as `NUMERIC`, `string`s as `TEXT` and `time.Time`s are stored as `TIMESTAMP`.
|
||||
|
||||
## Databooks
|
||||
|
||||
This is an example of how to use Databooks.
|
||||
|
||||
```go
|
||||
db := NewDatabook()
|
||||
// or loading a JSON content
|
||||
db, err := LoadDatabookJSON([]byte(`...`))
|
||||
// or a YAML content
|
||||
db, err := LoadDatabookYAML([]byte(`...`))
|
||||
|
||||
// a dataset of presidents
|
||||
presidents, _ := LoadJSON([]byte(`[
|
||||
{"Age":90,"First name":"John","Last name":"Adams"},
|
||||
{"Age":67,"First name":"George","Last name":"Washington"},
|
||||
{"Age":83,"First name":"Henry","Last name":"Ford"}
|
||||
]`))
|
||||
|
||||
// a dataset of cars
|
||||
cars := NewDataset([]string{"Maker", "Model", "Year"})
|
||||
cars.AppendValues("Porsche", "991", 2012)
|
||||
cars.AppendValues("Skoda", "Octavia", 2011)
|
||||
cars.AppendValues("Ferrari", "458", 2009)
|
||||
cars.AppendValues("Citroen", "Picasso II", 2013)
|
||||
cars.AppendValues("Bentley", "Continental GT", 2003)
|
||||
|
||||
// add the sheets to the Databook
|
||||
db.AddSheet("Cars", cars.Sort("Year"))
|
||||
db.AddSheet("Presidents", presidents.SortReverse("Age"))
|
||||
|
||||
fmt.Println(db.JSON())
|
||||
```
|
||||
|
||||
Will output the following JSON representation of the Databook:
|
||||
```json
|
||||
[
|
||||
{
|
||||
"title": "Cars",
|
||||
"data": [
|
||||
{"Maker":"Bentley","Model":"Continental GT","Year":2003},
|
||||
{"Maker":"Ferrari","Model":"458","Year":2009},
|
||||
{"Maker":"Skoda","Model":"Octavia","Year":2011},
|
||||
{"Maker":"Porsche","Model":"991","Year":2012},
|
||||
{"Maker":"Citroen","Model":"Picasso II","Year":2013}
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Presidents",
|
||||
"data": [
|
||||
{"Age":90,"First name":"John","Last name":"Adams"},
|
||||
{"Age":83,"First name":"Henry","Last name":"Ford"},
|
||||
{"Age":67,"First name":"George","Last name":"Washington"}
|
||||
]
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
go get github.com/agrison/go-tablib
|
||||
```
|
||||
|
||||
For those wanting the v1 version where export methods returned a `string` and not an `Exportable`:
|
||||
```bash
|
||||
go get gopkg.in/agrison/go-tablib.v1
|
||||
```
|
||||
|
||||
## TODO
|
||||
|
||||
* Loading in more formats
|
||||
* Support more formats: DBF, XLS, LATEX, ...
|
||||
|
||||
## Contribute
|
||||
|
||||
It is a work in progress, so it may exist some bugs and edge cases not covered by the test suite.
|
||||
|
||||
But we're on Github and this is Open Source, pull requests are more than welcomed, come and have some fun :)
|
||||
|
||||
## Acknowledgement
|
||||
|
||||
Thanks to kennethreitz for the first implementation in Python, [`github.com/bndr/gotabulate`](https://github.com/bndr/gotabulate), [`github.com/clbanning/mxj`](https://github.com/clbanning/mxj), [`github.com/tealeg/xlsx`](https://github.com/tealeg/xlsx), [`gopkg.in/yaml.v2`](https://gopkg.in/yaml.v2)
|
1
tablib/gotabulate/AUTHOR
Normal file
1
tablib/gotabulate/AUTHOR
Normal file
@ -0,0 +1 @@
|
||||
Vadim Kravcenko 2014
|
0
tablib/gotabulate/CHANGELOG
Normal file
0
tablib/gotabulate/CHANGELOG
Normal file
2
tablib/gotabulate/CONTRIBUTORS
Normal file
2
tablib/gotabulate/CONTRIBUTORS
Normal file
@ -0,0 +1,2 @@
|
||||
# Contributors git log --format='%aN <%aE>' | sort -uf
|
||||
Vadim Kravcenko <bndrzz@gmail.com>
|
201
tablib/gotabulate/LICENSE
Normal file
201
tablib/gotabulate/LICENSE
Normal file
@ -0,0 +1,201 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright {yyyy} {name of copyright owner}
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
222
tablib/gotabulate/README.md
Normal file
222
tablib/gotabulate/README.md
Normal file
@ -0,0 +1,222 @@
|
||||
# Gotabulate - Easily pretty-print tabular data
|
||||
[](https://godoc.org/github.com/bndr/gotabulate)
|
||||
[](https://travis-ci.org/bndr/gotabulate)
|
||||
|
||||
## Summary
|
||||
|
||||
Go-Tabulate - Generic Go Library for easy pretty-printing of tabular data.
|
||||
|
||||
## Installation
|
||||
|
||||
go get github.com/bndr/gotabulate
|
||||
|
||||
## Description
|
||||
|
||||
Supported data types:
|
||||
- 2D Array of `Int`, `Int64`, `Float64`, `String`, `interface{}`
|
||||
- Map of `String`, `interface{}` (Keys will be used as header)
|
||||
|
||||
## Usage
|
||||
|
||||
```go
|
||||
// Create Some Fake Rows
|
||||
row_1 := []interface{}{"john", 20, "ready"}
|
||||
row_2 := []interface{}{"bndr", 23, "ready"}
|
||||
|
||||
// Create an object from 2D interface array
|
||||
t := gotabulate.Create([][]interface{}{row_1, row_2})
|
||||
|
||||
// Set the Headers (optional)
|
||||
t.SetHeaders([]string{"age", "status"})
|
||||
|
||||
// Set the Empty String (optional)
|
||||
t.SetEmptyString("None")
|
||||
|
||||
// Set Align (Optional)
|
||||
t.SetAlign("right")
|
||||
|
||||
// Print the result: grid, or simple
|
||||
fmt.Println(t.Render("grid"))
|
||||
|
||||
+---------+--------+-----------+
|
||||
| | age | status |
|
||||
+=========+========+===========+
|
||||
| john | 20 | ready |
|
||||
+---------+--------+-----------+
|
||||
| bndr | 23 | ready |
|
||||
+---------+--------+-----------+
|
||||
```
|
||||
|
||||
## Example with String
|
||||
|
||||
```go
|
||||
// Some Strings
|
||||
string_1 := []string{"TV", "1000$", "Sold"}
|
||||
string_2 := []string{"PC", "50%", "on Hold"}
|
||||
|
||||
// Create Object
|
||||
tabulate := gotabulate.Create([][]string{string_1, string_2})
|
||||
|
||||
// Set Headers
|
||||
tabulate.SetHeaders([]string{"Type", "Cost", "Status"})
|
||||
|
||||
// Render
|
||||
fmt.Println(tabulate.Render("simple"))
|
||||
|
||||
--------- ---------- ------------
|
||||
Type Cost Status
|
||||
--------- ---------- ------------
|
||||
TV 1000$ Sold
|
||||
|
||||
PC 50% on Hold
|
||||
--------- ---------- ------------
|
||||
```
|
||||
|
||||
## Example with String Wrapping
|
||||
|
||||
```go
|
||||
tabulate := gotabulate.Create([][]string{[]string{"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus laoreet vestibulum pretium. Nulla et ornare elit. Cum sociis natoque penatibus et magnis",
|
||||
"Vivamus laoreet vestibulum pretium. Nulla et ornare elit. Cum sociis natoque penatibus et magnis", "zzLorem ipsum", " test", "test"}, []string{"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus laoreet vestibulum pretium. Nulla et ornare elit. Cum sociis natoque penatibus et magnis",
|
||||
"Vivamus laoreet vestibulum pretium. Nulla et ornare elit. Cum sociis natoque penatibus et magnis", "zzLorem ipsum", " test", "test"}, STRING_ARRAY, []string{"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus laoreet vestibulum pretium. Nulla et ornare elit. Cum sociis natoque penatibus et magnis",
|
||||
"Vivamus laoreet vestibulum pretium. Nulla et ornare elit. Cum sociis natoque penatibus et magnis", "zzLorem ipsum", " test", "test"}, STRING_ARRAY})
|
||||
|
||||
tabulate.SetHeaders([]string{"Header 1", "header 2", "header 3", "header 4"})
|
||||
// Set Max Cell Size
|
||||
tabulate.SetMaxCellSize(16)
|
||||
|
||||
// Turn On String Wrapping
|
||||
tabulate.SetWrapStrings(true)
|
||||
|
||||
// Render the table
|
||||
fmt.Println(tabulate.Render("grid"))
|
||||
|
||||
+---------------------+---------------------+----------------+-------------+-------------+
|
||||
| | Header 1 | header 2 | header 3 | header 4 |
|
||||
+=====================+=====================+================+=============+=============+
|
||||
| Lorem ipsum dolo | Vivamus laoreet | Lorem ipsum | test | test |
|
||||
| r sit amet, cons | vestibulum preti | | | |
|
||||
| ectetur adipisci | um. Nulla et orn | | | |
|
||||
| ng elit. Vivamus | are elit. Cum so | | | |
|
||||
| laoreet vestibu | ciis natoque pen | | | |
|
||||
| lum pretium. Nul | atibus et magnis | | | |
|
||||
| la et ornare eli | | | | |
|
||||
| t. Cum sociis na | | | | |
|
||||
| toque penatibus | | | | |
|
||||
| et magnis | | | | |
|
||||
+---------------------+---------------------+----------------+-------------+-------------+
|
||||
| Lorem ipsum dolo | Vivamus laoreet | Lorem ipsum | test | test |
|
||||
| r sit amet, cons | vestibulum preti | | | |
|
||||
| ectetur adipisci | um. Nulla et orn | | | |
|
||||
| ng elit. Vivamus | are elit. Cum so | | | |
|
||||
| laoreet vestibu | ciis natoque pen | | | |
|
||||
| lum pretium. Nul | atibus et magnis | | | |
|
||||
| la et ornare eli | | | | |
|
||||
| t. Cum sociis na | | | | |
|
||||
| toque penatibus | | | | |
|
||||
| et magnis | | | | |
|
||||
+---------------------+---------------------+----------------+-------------+-------------+
|
||||
| test string | test string 2 | test | row | bndr |
|
||||
+---------------------+---------------------+----------------+-------------+-------------+
|
||||
| Lorem ipsum dolo | Vivamus laoreet | Lorem ipsum | test | test |
|
||||
| r sit amet, cons | vestibulum preti | | | |
|
||||
| ectetur adipisci | um. Nulla et orn | | | |
|
||||
| ng elit. Vivamus | are elit. Cum so | | | |
|
||||
| laoreet vestibu | ciis natoque pen | | | |
|
||||
| lum pretium. Nul | atibus et magnis | | | |
|
||||
| la et ornare eli | | | | |
|
||||
| t. Cum sociis na | | | | |
|
||||
| toque penatibus | | | | |
|
||||
| et magnis | | | | |
|
||||
+---------------------+---------------------+----------------+-------------+-------------+
|
||||
| test string | test string 2 | test | row | bndr |
|
||||
+---------------------+---------------------+----------------+-------------+-------------+
|
||||
```
|
||||
## Examples
|
||||
|
||||
```go
|
||||
t := gotabulate.Create([][]string{STRING_ARRAY, STRING_ARRAY})
|
||||
|
||||
t.SetHeaders(HEADERS) // If not headers are set, the first row will be used.
|
||||
|
||||
t.SetEmptyString("None") // Set what will be printed in the empty cell
|
||||
|
||||
rendered_string := t.Render("simple") // Render() will return a string
|
||||
|
||||
Simple Table
|
||||
---------------------- ---------------------- ---------------------- ------------- -------------
|
||||
Header 1 Header 2 Header 3 Header 4 Header 5
|
||||
---------------------- ---------------------- ---------------------- ------------- -------------
|
||||
test string test string 2 test row bndr
|
||||
|
||||
test string test string 2 test row bndr
|
||||
|
||||
4th element empty 4th element empty 4th element empty None None
|
||||
---------------------- ---------------------- ---------------------- ------------- -------------
|
||||
|
||||
Grid Table (Align Right)
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| Header 1 | Header 2 | Header 3 | Header 4 | Header 5 |
|
||||
+=============+=============+=============+=============+=============+
|
||||
| 10.01 | 12.002 | -123.5 | 20.00005 | 1.01 |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| 10.01 | 12.002 | -123.5 | 20.00005 | 1.01 |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| 10.01 | 12.002 | -123.5 | 20.00005 | None |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
|
||||
Padded Headers:
|
||||
+----------------------+----------------------+----------------------+-------------+-------------+
|
||||
| | Header 1 | header 2 | header 3 | header 4 |
|
||||
+======================+======================+======================+=============+=============+
|
||||
| test string | test string 2 | test | row | bndr |
|
||||
+----------------------+----------------------+----------------------+-------------+-------------+
|
||||
| test string | test string 2 | test | row | bndr |
|
||||
+----------------------+----------------------+----------------------+-------------+-------------+
|
||||
| 4th element empty | 4th element empty | 4th element empty | None | None |
|
||||
+----------------------+----------------------+----------------------+-------------+-------------+
|
||||
|
||||
Align Center:
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| Header 1 | Header 2 | Header 3 | Header 4 | Header 5 |
|
||||
+=============+=============+=============+=============+=============+
|
||||
| 10.01 | 12.002 | -123.5 | 20.00005 | 1.01 |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| 10.01 | 12.002 | -123.5 | 20.00005 | 1.01 |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| 10.01 | 12.002 | -123.5 | 20.00005 | 1.01 |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
|
||||
Align Left:
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| Header 1 | Header 2 | Header 3 | Header 4 | Header 5 |
|
||||
+=============+=============+=============+=============+=============+
|
||||
| 10.01 | 12.002 | -123.5 | 20.00005 | 1.01 |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| 10.01 | 12.002 | -123.5 | 20.00005 | 1.01 |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| 10.01 | 12.002 | -123.5 | 20.00005 | 1.01 |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
```
|
||||
|
||||
### Status
|
||||
|
||||
Beta version. There may be edge cases that I have missed, so if your tables don't render properly please open up an issue.
|
||||
|
||||
## Contribute
|
||||
|
||||
All Contributions are welcome. The todo list is on the bottom of this README. Feel free to send a pull request.
|
||||
|
||||
## License
|
||||
|
||||
Apache License 2.0
|
||||
|
||||
## TODO
|
||||
|
||||
- [ ] Add more examples
|
||||
- [ ] Better Documentation
|
||||
- [ ] Implement more data table formats
|
||||
- [ ] Decimal point alignment for floats
|
||||
|
||||
## Acknowledgement
|
||||
|
||||
Inspired by Python package [tabulate](https://pypi.python.org/pypi/tabulate)
|
9
tablib/gotabulate/_tests/grid_floats
Normal file
9
tablib/gotabulate/_tests/grid_floats
Normal file
@ -0,0 +1,9 @@
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| Header 1 | Header 2 | Header 3 | Header 4 | Header 5 |
|
||||
+=============+=============+=============+=============+=============+
|
||||
| 10.01 | 12.002 | -123.5 | 20.00005 | 1.01 |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| 10.01 | 12.002 | -123.5 | 20.00005 | 1.01 |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| 10.01 | 12.002 | -123.5 | 20.00005 | None |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
9
tablib/gotabulate/_tests/grid_ints
Normal file
9
tablib/gotabulate/_tests/grid_ints
Normal file
@ -0,0 +1,9 @@
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| Header 1 | Header 2 | Header 3 | Header 4 | Header 5 |
|
||||
+=============+=============+=============+=============+=============+
|
||||
| 1 | 2 | 3 | 1000 | 200 |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| 1 | 2 | 3 | 1000 | 200 |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| 1 | 2 | 3 | 1000 | None |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
9
tablib/gotabulate/_tests/grid_ints64
Normal file
9
tablib/gotabulate/_tests/grid_ints64
Normal file
@ -0,0 +1,9 @@
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| Header 1 | Header 2 | Header 3 | Header 4 | Header 5 |
|
||||
+=============+=============+=============+=============+=============+
|
||||
| 100 | 500 | 600 | 1000 | 10000 |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| 100 | 500 | 600 | 1000 | 10000 |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| 100 | 500 | 600 | 1000 | None |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
7
tablib/gotabulate/_tests/grid_map_mixed
Normal file
7
tablib/gotabulate/_tests/grid_map_mixed
Normal file
@ -0,0 +1,7 @@
|
||||
+-------------+-------------+-------------+-------------------+-------------+
|
||||
| Header 1 | Header 2 | Header 3 | Header 4 | Header 5 |
|
||||
+=============+=============+=============+===================+=============+
|
||||
| string | 1 | 1.005 | another string | -2 |
|
||||
+-------------+-------------+-------------+-------------------+-------------+
|
||||
| string | 1 | 1.005 | another string | -2 |
|
||||
+-------------+-------------+-------------+-------------------+-------------+
|
7
tablib/gotabulate/_tests/grid_mixed
Normal file
7
tablib/gotabulate/_tests/grid_mixed
Normal file
@ -0,0 +1,7 @@
|
||||
+-------------+-------------+-------------+-------------------+-------------+
|
||||
| Header 1 | Header 2 | Header 3 | Header 4 | Header 5 |
|
||||
+=============+=============+=============+===================+=============+
|
||||
| string | 1 | 1.005 | another string | -2 |
|
||||
+-------------+-------------+-------------+-------------------+-------------+
|
||||
| string | 1 | 1.005 | another string | -2 |
|
||||
+-------------+-------------+-------------+-------------------+-------------+
|
9
tablib/gotabulate/_tests/grid_strings
Normal file
9
tablib/gotabulate/_tests/grid_strings
Normal file
@ -0,0 +1,9 @@
|
||||
+----------------------+----------------------+----------------------+-------------+-------------+
|
||||
| Header 1 | Header 2 | Header 3 | Header 4 | Header 5 |
|
||||
+======================+======================+======================+=============+=============+
|
||||
| test string | test string 2 | test | row | bndr |
|
||||
+----------------------+----------------------+----------------------+-------------+-------------+
|
||||
| test string | test string 2 | test | row | bndr |
|
||||
+----------------------+----------------------+----------------------+-------------+-------------+
|
||||
| 4th element empty | 4th element empty | 4th element empty | None | None |
|
||||
+----------------------+----------------------+----------------------+-------------+-------------+
|
11
tablib/gotabulate/_tests/grid_strings_titled
Normal file
11
tablib/gotabulate/_tests/grid_strings_titled
Normal file
@ -0,0 +1,11 @@
|
||||
+----------------------+----------------------+----------------------+-------------+-------------+
|
||||
| Title One |
|
||||
+----------------------+----------------------+----------------------+-------------+-------------+
|
||||
| Header 1 | Header 2 | Header 3 | Header 4 | Header 5 |
|
||||
+======================+======================+======================+=============+=============+
|
||||
| test string | test string 2 | test | row | bndr |
|
||||
+----------------------+----------------------+----------------------+-------------+-------------+
|
||||
| test string | test string 2 | test | row | bndr |
|
||||
+----------------------+----------------------+----------------------+-------------+-------------+
|
||||
| 4th element empty | 4th element empty | 4th element empty | None | None |
|
||||
+----------------------+----------------------+----------------------+-------------+-------------+
|
11
tablib/gotabulate/_tests/plain_strings_titled
Normal file
11
tablib/gotabulate/_tests/plain_strings_titled
Normal file
@ -0,0 +1,11 @@
|
||||
|
||||
Make Titles Great Again
|
||||
|
||||
Header 1 Header 2 Header 3 Header 4 Header 5
|
||||
|
||||
test string test string 2 test row bndr
|
||||
|
||||
test string test string 2 test row bndr
|
||||
|
||||
4th element empty 4th element empty 4th element empty None None
|
||||
|
9
tablib/gotabulate/_tests/simple_floats
Normal file
9
tablib/gotabulate/_tests/simple_floats
Normal file
@ -0,0 +1,9 @@
|
||||
------------- ------------- ------------- ------------- -------------
|
||||
Header 1 Header 2 Header 3 Header 4 Header 5
|
||||
------------- ------------- ------------- ------------- -------------
|
||||
10.01 12.002 -123.5 20.00005 1.01
|
||||
|
||||
10.01 12.002 -123.5 20.00005 1.01
|
||||
|
||||
10.01 12.002 -123.5 20.00005 None
|
||||
------------- ------------- ------------- ------------- -------------
|
9
tablib/gotabulate/_tests/simple_ints
Normal file
9
tablib/gotabulate/_tests/simple_ints
Normal file
@ -0,0 +1,9 @@
|
||||
------------- ------------- ------------- ------------- -------------
|
||||
Header 1 Header 2 Header 3 Header 4 Header 5
|
||||
------------- ------------- ------------- ------------- -------------
|
||||
1 2 3 1000 200
|
||||
|
||||
1 2 3 1000 200
|
||||
|
||||
1 2 3 1000 None
|
||||
------------- ------------- ------------- ------------- -------------
|
9
tablib/gotabulate/_tests/simple_ints64
Normal file
9
tablib/gotabulate/_tests/simple_ints64
Normal file
@ -0,0 +1,9 @@
|
||||
------------- ------------- ------------- ------------- -------------
|
||||
Header 1 Header 2 Header 3 Header 4 Header 5
|
||||
------------- ------------- ------------- ------------- -------------
|
||||
100 500 600 1000 10000
|
||||
|
||||
100 500 600 1000 10000
|
||||
|
||||
100 500 600 1000 None
|
||||
------------- ------------- ------------- ------------- -------------
|
7
tablib/gotabulate/_tests/simple_map_mixed
Normal file
7
tablib/gotabulate/_tests/simple_map_mixed
Normal file
@ -0,0 +1,7 @@
|
||||
------------- ------------- ------------- ------------------- -------------
|
||||
Header 1 Header 2 Header 3 Header 4 Header 5
|
||||
------------- ------------- ------------- ------------------- -------------
|
||||
string 1 1.005 another string -2
|
||||
|
||||
string 1 1.005 another string -2
|
||||
------------- ------------- ------------- ------------------- -------------
|
7
tablib/gotabulate/_tests/simple_mixed
Normal file
7
tablib/gotabulate/_tests/simple_mixed
Normal file
@ -0,0 +1,7 @@
|
||||
------------- ------------- ------------- ------------------- -------------
|
||||
Header 1 Header 2 Header 3 Header 4 Header 5
|
||||
------------- ------------- ------------- ------------------- -------------
|
||||
string 1 1.005 another string -2
|
||||
|
||||
string 1 1.005 another string -2
|
||||
------------- ------------- ------------- ------------------- -------------
|
9
tablib/gotabulate/_tests/simple_strings
Normal file
9
tablib/gotabulate/_tests/simple_strings
Normal file
@ -0,0 +1,9 @@
|
||||
---------------------- ---------------------- ---------------------- ------------- -------------
|
||||
Header 1 Header 2 Header 3 Header 4 Header 5
|
||||
---------------------- ---------------------- ---------------------- ------------- -------------
|
||||
test string test string 2 test row bndr
|
||||
|
||||
test string test string 2 test row bndr
|
||||
|
||||
4th element empty 4th element empty 4th element empty None None
|
||||
---------------------- ---------------------- ---------------------- ------------- -------------
|
11
tablib/gotabulate/_tests/simple_strings_titled
Normal file
11
tablib/gotabulate/_tests/simple_strings_titled
Normal file
@ -0,0 +1,11 @@
|
||||
---------------------- ---------------------- ---------------------- ------------- -------------
|
||||
Simple Title
|
||||
---------------------- ---------------------- ---------------------- ------------- -------------
|
||||
Header 1 Header 2 Header 3 Header 4 Header 5
|
||||
---------------------- ---------------------- ---------------------- ------------- -------------
|
||||
test string test string 2 test row bndr
|
||||
|
||||
test string test string 2 test row bndr
|
||||
|
||||
4th element empty 4th element empty 4th element empty None None
|
||||
---------------------- ---------------------- ---------------------- ------------- -------------
|
25
tablib/gotabulate/_tests/smart_wrap
Normal file
25
tablib/gotabulate/_tests/smart_wrap
Normal file
@ -0,0 +1,25 @@
|
||||
+-----------+-------------------------+
|
||||
| header | value |
|
||||
+===========+=========================+
|
||||
| test1 | This is a really |
|
||||
| | long string, yaaaay |
|
||||
| | it works, Vivamus |
|
||||
| | laoreet vestibulum |
|
||||
| | pretium. Nulla et |
|
||||
| | ornare elit. Cum |
|
||||
| | sociis natoque |
|
||||
| | penatibus et magnis |
|
||||
| | Vivamus laoreet |
|
||||
| | vestibulum pretium. |
|
||||
| | Nulla et ornare |
|
||||
| | elit. Cum sociis |
|
||||
| | natoque penatibus et |
|
||||
| | magnis |
|
||||
+-----------+-------------------------+
|
||||
| test2 | AAAAAAAAAAAAAAAAAAA- |
|
||||
| | ABBBBBBBBBBBBBBBBBB- |
|
||||
| | BBBBBBBCCCCCCCCCCCC- |
|
||||
| | CCCCCCCCCCCCCEEEEEE- |
|
||||
| | EEEEEEEEEEEEEEDDDDD- |
|
||||
| | DDDDDDDDd |
|
||||
+-----------+-------------------------+
|
9
tablib/gotabulate/_tests/table_float
Normal file
9
tablib/gotabulate/_tests/table_float
Normal file
@ -0,0 +1,9 @@
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| Header 1 | Header 2 | Header 3 | Header 4 | Header 5 |
|
||||
+=============+=============+=============+=============+=============+
|
||||
| 10.01 | 12.002 | -123.5 | 20.00005 | 1.01 |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| 10.01 | 12.002 | -123.5 | 20.00005 | 1.01 |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| 10.01 | 12.002 | -123.5 | 20.00005 | 1.01 |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
9
tablib/gotabulate/_tests/table_float_center_pad
Normal file
9
tablib/gotabulate/_tests/table_float_center_pad
Normal file
@ -0,0 +1,9 @@
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| Header 1 | Header 2 | Header 3 | Header 4 | Header 5 |
|
||||
+=============+=============+=============+=============+=============+
|
||||
| 10.01 | 12.002 | -123.5 | 20.00005 | 1.01 |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| 10.01 | 12.002 | -123.5 | 20.00005 | 1.01 |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| 10.01 | 12.002 | -123.5 | 20.00005 | 1.01 |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
9
tablib/gotabulate/_tests/table_float_right_pad
Normal file
9
tablib/gotabulate/_tests/table_float_right_pad
Normal file
@ -0,0 +1,9 @@
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| Header 1 | Header 2 | Header 3 | Header 4 | Header 5 |
|
||||
+=============+=============+=============+=============+=============+
|
||||
| 10.01 | 12.002 | -123.5 | 20.00005 | 1.01 |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| 10.01 | 12.002 | -123.5 | 20.00005 | 1.01 |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
||||
| 10.01 | 12.002 | -123.5 | 20.00005 | 1.01 |
|
||||
+-------------+-------------+-------------+-------------+-------------+
|
7
tablib/gotabulate/_tests/test_empty_element
Normal file
7
tablib/gotabulate/_tests/test_empty_element
Normal file
@ -0,0 +1,7 @@
|
||||
+----------------------+----------------------+----------------------+-------------+-------------+
|
||||
| Header 1 | Header 2 | Header 3 | Header 4 | Header 5 |
|
||||
+======================+======================+======================+=============+=============+
|
||||
| test string | test string 2 | test | row | bndr |
|
||||
+----------------------+----------------------+----------------------+-------------+-------------+
|
||||
| 4th element empty | 4th element empty | 4th element empty | None | None |
|
||||
+----------------------+----------------------+----------------------+-------------+-------------+
|
5
tablib/gotabulate/_tests/test_first_row
Normal file
5
tablib/gotabulate/_tests/test_first_row
Normal file
@ -0,0 +1,5 @@
|
||||
+----------------+------------------+---------+--------+---------+
|
||||
| test string | test string 2 | test | row | bndr |
|
||||
+================+==================+=========+========+=========+
|
||||
| test string | test string 2 | test | row | bndr |
|
||||
+----------------+------------------+---------+--------+---------+
|
5
tablib/gotabulate/_tests/test_headers
Normal file
5
tablib/gotabulate/_tests/test_headers
Normal file
@ -0,0 +1,5 @@
|
||||
---------------- ------------------ ------------- ------------- -------------
|
||||
Header 1 Header 2 Header 3 Header 4 Header 5
|
||||
---------------- ------------------ ------------- ------------- -------------
|
||||
test string test string 2 test row bndr
|
||||
---------------- ------------------ ------------- ------------- -------------
|
8
tablib/gotabulate/_tests/test_hide_lines
Normal file
8
tablib/gotabulate/_tests/test_hide_lines
Normal file
@ -0,0 +1,8 @@
|
||||
+----------------------+----------------------+----------------------+-------------+-------------+
|
||||
| | Header 1 | header 2 | header 3 | header 4 |
|
||||
| test string | test string 2 | test | row | bndr |
|
||||
+----------------------+----------------------+----------------------+-------------+-------------+
|
||||
| test string | test string 2 | test | row | bndr |
|
||||
+----------------------+----------------------+----------------------+-------------+-------------+
|
||||
| 4th element empty | 4th element empty | 4th element empty | | |
|
||||
+----------------------+----------------------+----------------------+-------------+-------------+
|
10
tablib/gotabulate/_tests/test_multibyte_string
Normal file
10
tablib/gotabulate/_tests/test_multibyte_string
Normal file
@ -0,0 +1,10 @@
|
||||
+-----------+---------------+
|
||||
| 時間帯 | 挨拶 |
|
||||
+===========+===============+
|
||||
| 朝 | おはようご |
|
||||
| | ざいます |
|
||||
+-----------+---------------+
|
||||
| 昼 | こんにちわ |
|
||||
+-----------+---------------+
|
||||
| 夜 | こんばんわ |
|
||||
+-----------+---------------+
|
9
tablib/gotabulate/_tests/test_padded_headers
Normal file
9
tablib/gotabulate/_tests/test_padded_headers
Normal file
@ -0,0 +1,9 @@
|
||||
+----------------------+----------------------+----------------------+-------------+-------------+
|
||||
| | Header 1 | header 2 | header 3 | header 4 |
|
||||
+======================+======================+======================+=============+=============+
|
||||
| test string | test string 2 | test | row | bndr |
|
||||
+----------------------+----------------------+----------------------+-------------+-------------+
|
||||
| test string | test string 2 | test | row | bndr |
|
||||
+----------------------+----------------------+----------------------+-------------+-------------+
|
||||
| 4th element empty | 4th element empty | 4th element empty | None | None |
|
||||
+----------------------+----------------------+----------------------+-------------+-------------+
|
16
tablib/gotabulate/_tests/test_string_wrap
Normal file
16
tablib/gotabulate/_tests/test_string_wrap
Normal file
@ -0,0 +1,16 @@
|
||||
+---------------------+---------------------+----------------+-------------+-------------+
|
||||
| | Header 1 | header 2 | header 3 | header 4 |
|
||||
+=====================+=====================+================+=============+=============+
|
||||
| Very long string | Another very lon | Lorem ipsum | test | test |
|
||||
| array test stri | g string just fo | | | |
|
||||
| array test stri | g string just fo | | | |
|
||||
| ng array test st | r testing | | | |
|
||||
| ng array test st | r testing | | | |
|
||||
| ring array test | | | | |
|
||||
+---------------------+---------------------+----------------+-------------+-------------+
|
||||
| ring array test | | | | |
|
||||
+---------------------+---------------------+----------------+-------------+-------------+
|
||||
| ring array test | | | | |
|
||||
+---------------------+---------------------+----------------+-------------+-------------+
|
||||
| ring array test | | | | |
|
||||
+---------------------+---------------------+----------------+-------------+-------------+
|
40
tablib/gotabulate/_tests/test_string_wrap_grid
Normal file
40
tablib/gotabulate/_tests/test_string_wrap_grid
Normal file
@ -0,0 +1,40 @@
|
||||
+---------------------+---------------------+------------------+-------------+-------------+
|
||||
| | Header 1 | header 2 | header 3 | header 4 |
|
||||
+=====================+=====================+==================+=============+=============+
|
||||
| Lorem ipsum dolo | Vivamus laoreet | zzLorem ipsum | test | test |
|
||||
| r sit amet, cons | vestibulum preti | | | |
|
||||
| ectetur adipisci | um. Nulla et orn | | | |
|
||||
| ng elit. Vivamus | are elit. Cum so | | | |
|
||||
| laoreet vestibu | ciis natoque pen | | | |
|
||||
| lum pretium. Nul | atibus et magnis | | | |
|
||||
| la et ornare eli | | | | |
|
||||
| t. Cum sociis na | | | | |
|
||||
| toque penatibus | | | | |
|
||||
| et magnis | | | | |
|
||||
+---------------------+---------------------+------------------+-------------+-------------+
|
||||
| Lorem ipsum dolo | Vivamus laoreet | zzLorem ipsum | test | test |
|
||||
| r sit amet, cons | vestibulum preti | | | |
|
||||
| ectetur adipisci | um. Nulla et orn | | | |
|
||||
| ng elit. Vivamus | are elit. Cum so | | | |
|
||||
| laoreet vestibu | ciis natoque pen | | | |
|
||||
| lum pretium. Nul | atibus et magnis | | | |
|
||||
| la et ornare eli | | | | |
|
||||
| t. Cum sociis na | | | | |
|
||||
| toque penatibus | | | | |
|
||||
| et magnis | | | | |
|
||||
+---------------------+---------------------+------------------+-------------+-------------+
|
||||
| test string | test string 2 | test | row | bndr |
|
||||
+---------------------+---------------------+------------------+-------------+-------------+
|
||||
| Lorem ipsum dolo | Vivamus laoreet | zzLorem ipsum | test | test |
|
||||
| r sit amet, cons | vestibulum preti | | | |
|
||||
| ectetur adipisci | um. Nulla et orn | | | |
|
||||
| ng elit. Vivamus | are elit. Cum so | | | |
|
||||
| laoreet vestibu | ciis natoque pen | | | |
|
||||
| lum pretium. Nul | atibus et magnis | | | |
|
||||
| la et ornare eli | | | | |
|
||||
| t. Cum sociis na | | | | |
|
||||
| toque penatibus | | | | |
|
||||
| et magnis | | | | |
|
||||
+---------------------+---------------------+------------------+-------------+-------------+
|
||||
| test string | test string 2 | test | row | bndr |
|
||||
+---------------------+---------------------+------------------+-------------+-------------+
|
40
tablib/gotabulate/_tests/test_string_wrap_simple
Normal file
40
tablib/gotabulate/_tests/test_string_wrap_simple
Normal file
@ -0,0 +1,40 @@
|
||||
--------------------- --------------------- ------------------ ------------- -------------
|
||||
Header 1 header 2 header 3 header 4
|
||||
--------------------- --------------------- ------------------ ------------- -------------
|
||||
Lorem ipsum dolo Vivamus laoreet zzLorem ipsum test test
|
||||
r sit amet, cons vestibulum preti
|
||||
ectetur adipisci um. Nulla et orn
|
||||
ng elit. Vivamus are elit. Cum so
|
||||
laoreet vestibu ciis natoque pen
|
||||
lum pretium. Nul atibus et magnis
|
||||
la et ornare eli
|
||||
t. Cum sociis na
|
||||
toque penatibus
|
||||
et magnis
|
||||
|
||||
Lorem ipsum dolo Vivamus laoreet zzLorem ipsum test test
|
||||
r sit amet, cons vestibulum preti
|
||||
ectetur adipisci um. Nulla et orn
|
||||
ng elit. Vivamus are elit. Cum so
|
||||
laoreet vestibu ciis natoque pen
|
||||
lum pretium. Nul atibus et magnis
|
||||
la et ornare eli
|
||||
t. Cum sociis na
|
||||
toque penatibus
|
||||
et magnis
|
||||
|
||||
test string test string 2 test row bndr
|
||||
|
||||
Lorem ipsum dolo Vivamus laoreet zzLorem ipsum test test
|
||||
r sit amet, cons vestibulum preti
|
||||
ectetur adipisci um. Nulla et orn
|
||||
ng elit. Vivamus are elit. Cum so
|
||||
laoreet vestibu ciis natoque pen
|
||||
lum pretium. Nul atibus et magnis
|
||||
la et ornare eli
|
||||
t. Cum sociis na
|
||||
toque penatibus
|
||||
et magnis
|
||||
|
||||
test string test string 2 test row bndr
|
||||
--------------------- --------------------- ------------------ ------------- -------------
|
582
tablib/gotabulate/tabulate.go
Normal file
582
tablib/gotabulate/tabulate.go
Normal file
@ -0,0 +1,582 @@
|
||||
package gotabulate
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"math"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/mattn/go-runewidth"
|
||||
)
|
||||
|
||||
|
||||
// TableFormat - Basic Structure of TableFormat
// A TableFormat describes every delimiter used to draw a table: the four
// horizontal rules, the three row kinds, and a padding multiplier.
type TableFormat struct {
	LineTop         Line // Rule above the header (also drawn above the title)
	LineBelowHeader Line // Rule between the header and the first data row
	LineBetweenRows Line // Rule between consecutive data rows
	LineBottom      Line // Rule below the last data row
	HeaderRow       Row  // Delimiters for the header row
	DataRow         Row  // Delimiters for data rows
	TitleRow        Row  // Delimiters for the title row
	Padding         int  // Multiplier applied to MinPadding when sizing cells
	HeaderHide      bool // NOTE(review): not referenced in the visible code — confirm use
	FitScreen       bool // NOTE(review): not referenced in the visible code — confirm use
}
|
||||
|
||||
// Line - Represents a Line
// begin/end are the outer corner strings, hline is the character repeated
// to fill each column, and sep is inserted between columns.
type Line struct {
	begin string
	hline string
	sep   string
	end   string
}
|
||||
|
||||
// Row - Represents a Row
// begin/end wrap the whole row; sep is inserted between adjacent cells.
type Row struct {
	begin string
	sep   string
	end   string
}
|
||||
|
||||
// TableFormats - Table Formats that are available to the user
// The user can define his own format, just by adding an entry to this map
// and calling it with Render function e.g t.Render("customFormat")
var TableFormats = map[string]TableFormat{
	// Dashed rules, space-separated cells, no vertical borders.
	"simple": TableFormat {
		LineTop:         Line{"", "-", "  ", ""},
		LineBelowHeader: Line{"", "-", "  ", ""},
		LineBottom:      Line{"", "-", "  ", ""},
		HeaderRow:       Row{"", "  ", ""},
		DataRow:         Row{"", "  ", ""},
		TitleRow:        Row{"", "  ", ""},
		Padding:         1,
	},
	// No rules at all — rows only.
	"plain": TableFormat {
		HeaderRow: Row{"", "  ", ""},
		DataRow:   Row{"", "  ", ""},
		TitleRow:  Row{"", "  ", ""},
		Padding:   1,
	},
	// ASCII box drawing with '=' under the header.
	"grid": TableFormat {
		LineTop:         Line{"+", "-", "+", "+"},
		LineBelowHeader: Line{"+", "=", "+", "+"},
		LineBetweenRows: Line{"+", "-", "+", "+"},
		LineBottom:      Line{"+", "-", "+", "+"},
		HeaderRow:       Row{"|", "|", "|"},
		DataRow:         Row{"|", "|", "|"},
		TitleRow:        Row{"|", " ", "|"},
		Padding:         1,
	},
	// Unicode box-drawing characters; no rule between data rows.
	"utf8": TableFormat {
		LineTop:         Line{"┏", "━", "┳", "┓"},
		LineBelowHeader: Line{"┣", "━", "╇", "┫"},
		// LineBetweenRows: Line{"┣", "━", "╇", "┫"},
		LineBetweenRows: Line{"", "", "", ""},
		LineBottom:      Line{"┗", "━", "┷", "┛"},
		HeaderRow:       Row{"┃", "┃", "┃"},
		DataRow:         Row{"┃", "┃", "┃"},
		TitleRow:        Row{"┃", "┃", "┃"},
		Padding:         1,
	},
	// Mixed ASCII/Unicode variant; no rule between data rows.
	"mick": TableFormat {
		LineTop:         Line{"┏", "—", "┳", "┓"},
		LineBelowHeader: Line{"|", "—", "╇", "|"},
		// LineBetweenRows: Line{"┃", "━", "╇", "┃"},
		LineBetweenRows: Line{"", "", "", ""},
		LineBottom:      Line{"┗", "—", "┷", "┛"},
		HeaderRow:       Row{"|", "|", "|"},
		DataRow:         Row{"|", "|", "|"},
		TitleRow:        Row{"|", "|", "|"},
		Padding:         1,
	},
	// NOTE(review): "condensed" and "markdown" are currently byte-identical
	// to "simple" — presumably placeholders to be specialized; confirm.
	"condensed": TableFormat {
		LineTop:         Line{"", "-", "  ", ""},
		LineBelowHeader: Line{"", "-", "  ", ""},
		LineBottom:      Line{"", "-", "  ", ""},
		HeaderRow:       Row{"", "  ", ""},
		DataRow:         Row{"", "  ", ""},
		TitleRow:        Row{"", "  ", ""},
		Padding:         1,
	},
	"markdown": TableFormat {
		LineTop:         Line{"", "-", "  ", ""},
		LineBelowHeader: Line{"", "-", "  ", ""},
		LineBottom:      Line{"", "-", "  ", ""},
		HeaderRow:       Row{"", "  ", ""},
		DataRow:         Row{"", "  ", ""},
		TitleRow:        Row{"", "  ", ""},
		Padding:         1,
	},
}
|
||||
|
||||
// MinPadding - Minimum padding that will be applied
// Added to every column width (scaled by TableFormat.Padding in Render).
var MinPadding = 5
|
||||
|
||||
// Tabulate - Main Tabulate structure
// Holds the normalized data rows plus all rendering configuration; it is
// usually built with Create and configured via the Set* methods.
type Tabulate struct {
	Data          []*TabulateRow // Normalized rows to render
	Title         string         // Optional title printed above the table
	TitleAlign    string         // Alignment used only for the title row
	Headers       []string       // Column headers; popped from Data[0] when unset
	FloatFormat   byte           // Format byte used when converting floats
	TableFormat   TableFormat    // Active border/separator format
	Align         string         // Cell alignment: "left", "right" or "center"
	EmptyVar      string         // Replacement text for empty/missing cells
	HideLines     []string       // Names of separator lines to suppress
	MaxSize       int            // Max display width of a cell before wrapping
	WrapStrings   bool           // Whether to wrap over-wide cells onto extra rows
	WrapDelimiter rune           // Preferred rune to break wrapped cells on
	SplitConcat   string         // Suffix appended on a hard truncation
	DenseMode     bool           // When true, no separator lines between rows
}
|
||||
|
||||
// TabulateRow - Represents normalized tabulate Row
type TabulateRow struct {
	Elements []string // Cell values for this row
	// Continuos marks a row whose cells overflow onto a following
	// continuation row (set by wrapCellData; suppresses the separator
	// line after it in Render). Misspelling of "Continuous" kept:
	// it is an exported field, renaming would break callers.
	Continuos bool
}
|
||||
|
||||
// writeBuffer is a thin wrapper around bytes.Buffer that supports
// writing a string a given number of times, with call chaining.
type writeBuffer struct {
	Buffer bytes.Buffer
}

// createBuffer returns a new, empty writeBuffer.
func createBuffer() *writeBuffer {
	return &writeBuffer{}
}

// Write appends count copies of str to the buffer and returns the
// receiver so calls can be chained. A count <= 0 writes nothing.
func (b *writeBuffer) Write(str string, count int) *writeBuffer {
	for n := count; n > 0; n-- {
		b.Buffer.WriteString(str)
	}
	return b
}

// String returns the accumulated contents as a string.
func (b *writeBuffer) String() string {
	return b.Buffer.String()
}
|
||||
|
||||
// Add padding to each cell
|
||||
func (t *Tabulate) padRow(arr []string, padding int) []string {
|
||||
if len(arr) < 1 {
|
||||
return arr
|
||||
}
|
||||
padded := make([]string, len(arr))
|
||||
for index, el := range arr {
|
||||
b := createBuffer()
|
||||
b.Write(" ", padding)
|
||||
b.Write(el, 1)
|
||||
b.Write(" ", padding)
|
||||
padded[index] = b.String()
|
||||
}
|
||||
return padded
|
||||
}
|
||||
|
||||
// Align right (Add padding left)
|
||||
func (t *Tabulate) padLeft(width int, str string) string {
|
||||
b := createBuffer()
|
||||
b.Write(" ", (width - runewidth.StringWidth(str)))
|
||||
b.Write(str, 1)
|
||||
return b.String()
|
||||
}
|
||||
|
||||
// Align Left (Add padding right)
|
||||
func (t *Tabulate) padRight(width int, str string) string {
|
||||
b := createBuffer()
|
||||
b.Write(str, 1)
|
||||
b.Write(" ", (width - runewidth.StringWidth(str)))
|
||||
return b.String()
|
||||
}
|
||||
|
||||
// Center the element in the cell
|
||||
func (t *Tabulate) padCenter(width int, str string) string {
|
||||
b := createBuffer()
|
||||
padding := int(math.Ceil(float64((width - runewidth.StringWidth(str))) / 2.0))
|
||||
b.Write(" ", padding)
|
||||
b.Write(str, 1)
|
||||
b.Write(" ", (width - runewidth.StringWidth(b.String())))
|
||||
|
||||
return b.String()
|
||||
}
|
||||
|
||||
// Build Line based on padded_widths from t.GetWidths()
|
||||
func (t *Tabulate) buildLine(padded_widths []int, padding []int, l Line) string {
|
||||
if l.begin == "" && l.hline == "" && l.sep == "" && l.end == "" {
|
||||
return ""
|
||||
}
|
||||
|
||||
cells := make([]string, len(padded_widths))
|
||||
|
||||
for i, _ := range cells {
|
||||
b := createBuffer()
|
||||
b.Write(l.hline, padding[i]+MinPadding)
|
||||
cells[i] = b.String()
|
||||
}
|
||||
|
||||
var buffer bytes.Buffer
|
||||
buffer.WriteString(l.begin)
|
||||
|
||||
// Print contents
|
||||
for i := 0; i < len(cells); i++ {
|
||||
buffer.WriteString(cells[i])
|
||||
if i != len(cells)-1 {
|
||||
buffer.WriteString(l.sep)
|
||||
}
|
||||
}
|
||||
|
||||
buffer.WriteString(l.end)
|
||||
return buffer.String()
|
||||
}
|
||||
|
||||
// buildRow - based on padded_widths from t.GetWidths()
|
||||
func (t *Tabulate) buildRow(elements []string, padded_widths []int, paddings []int, d Row) string {
|
||||
|
||||
var buffer bytes.Buffer
|
||||
buffer.WriteString(d.begin)
|
||||
padFunc := t.getAlignFunc()
|
||||
// Print contents
|
||||
for i := 0; i < len(padded_widths); i++ {
|
||||
output := ""
|
||||
if len(elements) <= i || (len(elements) > i && elements[i] == " nil ") {
|
||||
output = padFunc(padded_widths[i], t.EmptyVar)
|
||||
} else if len(elements) > i {
|
||||
output = padFunc(padded_widths[i], elements[i])
|
||||
}
|
||||
buffer.WriteString(output)
|
||||
if i != len(padded_widths)-1 {
|
||||
buffer.WriteString(d.sep)
|
||||
}
|
||||
}
|
||||
|
||||
buffer.WriteString(d.end)
|
||||
return buffer.String()
|
||||
}
|
||||
|
||||
// SetWrapDelimiter - assigns the character in a string that the renderer
// will attempt to split strings on when a cell must be wrapped
func (t *Tabulate) SetWrapDelimiter(r rune) {
	t.WrapDelimiter = r
}
|
||||
|
||||
// SetSplitConcat - assigns the character that will be used when a WrapDelimiter is
// set but the renderer cannot abide by the desired split. This may happen when
// the WrapDelimiter is a space ' ' but a single word is longer than the width of a cell
func (t *Tabulate) SetSplitConcat(r string) {
	t.SplitConcat = r
}
|
||||
|
||||
// Render - the data table
|
||||
func (t *Tabulate) Render(format ...interface{}) string {
|
||||
var lines []string
|
||||
|
||||
// If headers are set use them, otherwise pop the first row
|
||||
if len(t.Headers) < 1 && len(t.Data) > 1 {
|
||||
t.Headers, t.Data = t.Data[0].Elements, t.Data[1:]
|
||||
}
|
||||
|
||||
// Use the format that was passed as parameter, otherwise
|
||||
// use the format defined in the struct
|
||||
if len(format) > 0 {
|
||||
t.TableFormat = TableFormats[format[0].(string)]
|
||||
}
|
||||
|
||||
// If Wrap Strings is set to True,then break up the string to multiple cells
|
||||
if t.WrapStrings {
|
||||
t.Data = t.wrapCellData()
|
||||
}
|
||||
|
||||
// Check if Data is present
|
||||
if len(t.Data) < 1 {
|
||||
return ""
|
||||
}
|
||||
|
||||
if len(t.Headers) < len(t.Data[0].Elements) {
|
||||
diff := len(t.Data[0].Elements) - len(t.Headers)
|
||||
padded_header := make([]string, diff)
|
||||
for _, e := range t.Headers {
|
||||
padded_header = append(padded_header, e)
|
||||
}
|
||||
t.Headers = padded_header
|
||||
}
|
||||
|
||||
// Get Column widths for all columns
|
||||
cols := t.getWidths(t.Headers, t.Data)
|
||||
|
||||
padded_widths := make([]int, len(cols))
|
||||
for i, _ := range padded_widths {
|
||||
padded_widths[i] = cols[i] + MinPadding*t.TableFormat.Padding
|
||||
}
|
||||
|
||||
// Calculate total width of the table
|
||||
totalWidth := len(t.TableFormat.DataRow.sep) * (len(cols) - 1) // Include all but the final separator
|
||||
for _, w := range padded_widths {
|
||||
totalWidth += w
|
||||
}
|
||||
|
||||
// Start appending lines
|
||||
if len(t.Title) > 0 {
|
||||
if !inSlice("aboveTitle", t.HideLines) {
|
||||
lines = append(lines, t.buildLine(padded_widths, cols, t.TableFormat.LineTop))
|
||||
}
|
||||
savedAlign := t.Align
|
||||
if len(t.TitleAlign) > 0 {
|
||||
t.SetAlign(t.TitleAlign) // Temporary replace alignment with the title alignment
|
||||
}
|
||||
lines = append(lines, t.buildRow([]string{t.Title}, []int{totalWidth}, nil, t.TableFormat.TitleRow))
|
||||
t.SetAlign(savedAlign)
|
||||
}
|
||||
|
||||
// Append top line if not hidden
|
||||
if !inSlice("top", t.HideLines) {
|
||||
lines = append(lines, t.buildLine(padded_widths, cols, t.TableFormat.LineTop))
|
||||
}
|
||||
|
||||
// Add Header
|
||||
lines = append(lines, t.buildRow(t.padRow(t.Headers, t.TableFormat.Padding), padded_widths, cols, t.TableFormat.HeaderRow))
|
||||
|
||||
// Add Line Below Header if not hidden
|
||||
if !inSlice("belowheader", t.HideLines) {
|
||||
lines = append(lines, t.buildLine(padded_widths, cols, t.TableFormat.LineBelowHeader))
|
||||
}
|
||||
|
||||
// Add Data Rows
|
||||
for index, element := range t.Data {
|
||||
lines = append(lines, t.buildRow(t.padRow(element.Elements, t.TableFormat.Padding), padded_widths, cols, t.TableFormat.DataRow))
|
||||
if !t.DenseMode && index < len(t.Data)-1 {
|
||||
if element.Continuos != true && !inSlice("betweenLine", t.HideLines) {
|
||||
// if t.TableFormat.LineBetweenRows.begin == "" &&
|
||||
// t.TableFormat.LineBetweenRows.hline == "" &&
|
||||
// t.TableFormat.LineBetweenRows.sep == "" &&
|
||||
// t.TableFormat.LineBetweenRows.end == "" {
|
||||
// } else {
|
||||
lines = append(lines, t.buildLine(padded_widths, cols, t.TableFormat.LineBetweenRows))
|
||||
// }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !inSlice("bottomLine", t.HideLines) {
|
||||
lines = append(lines, t.buildLine(padded_widths, cols, t.TableFormat.LineBottom))
|
||||
}
|
||||
|
||||
// Join lines
|
||||
var buffer bytes.Buffer
|
||||
for _, line := range lines {
|
||||
buffer.WriteString(line + "\n")
|
||||
}
|
||||
|
||||
return buffer.String()
|
||||
}
|
||||
|
||||
// Calculate the max column width for each element
|
||||
func (t *Tabulate) getWidths(headers []string, data []*TabulateRow) []int {
|
||||
widths := make([]int, len(headers))
|
||||
current_max := len(t.EmptyVar)
|
||||
for i := 0; i < len(headers); i++ {
|
||||
current_max = runewidth.StringWidth(headers[i])
|
||||
for _, item := range data {
|
||||
if len(item.Elements) > i && len(widths) > i {
|
||||
element := item.Elements[i]
|
||||
strLength := runewidth.StringWidth(element)
|
||||
if strLength > current_max {
|
||||
widths[i] = strLength
|
||||
current_max = strLength
|
||||
} else {
|
||||
widths[i] = current_max
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return widths
|
||||
}
|
||||
|
||||
// SetTitle sets the title of the table can also accept a second string to define an alignment for the title
|
||||
func (t *Tabulate) SetTitle(title ...string) *Tabulate {
|
||||
|
||||
t.Title = title[0]
|
||||
if len(title) > 1 {
|
||||
t.TitleAlign = title[1]
|
||||
}
|
||||
|
||||
return t
|
||||
}
|
||||
|
||||
// SetHeaders - Set Headers of the table
// If Headers count is less than the data row count, the headers will be padded to the right
// Returns the receiver so calls can be chained.
func (t *Tabulate) SetHeaders(headers []string) *Tabulate {
	t.Headers = headers
	return t
}
|
||||
|
||||
// SetFloatFormat - Set Float Formatting
// will be used in strconv.FormatFloat(element, format, -1, 64)
// Returns the receiver so calls can be chained.
func (t *Tabulate) SetFloatFormat(format byte) *Tabulate {
	t.FloatFormat = format
	return t
}
|
||||
|
||||
// SetAlign - Set Align Type, Available options: left, right, center
// Any other value also centers; an empty value right-aligns (see getAlignFunc).
func (t *Tabulate) SetAlign(align string) {
	t.Align = align
}
|
||||
|
||||
// Select the padding function based on the align type
|
||||
func (t *Tabulate) getAlignFunc() func(int, string) string {
|
||||
if len(t.Align) < 1 || t.Align == "right" {
|
||||
return t.padLeft
|
||||
} else if t.Align == "left" {
|
||||
return t.padRight
|
||||
} else {
|
||||
return t.padCenter
|
||||
}
|
||||
}
|
||||
|
||||
// SetEmptyString - Set how an empty cell will be represented
// NOTE(review): a trailing space is always appended to the placeholder —
// presumably to balance cell padding; confirm before removing.
func (t *Tabulate) SetEmptyString(empty string) {
	t.EmptyVar = empty + " "
}
|
||||
|
||||
// SetHideLines - Set which lines to hide.
// Can be:
// top - Top line of the table,
// belowheader - Line below the header,
// bottomLine - Bottom line of the table
// betweenLine - Between line of the table
// ("aboveTitle" is also checked by Render when a title is set.)
func (t *Tabulate) SetHideLines(hide []string) {
	t.HideLines = hide
}
|
||||
|
||||
// SetWrapStrings enables or disables wrapping of cells wider than
// MaxSize onto continuation rows (see wrapCellData).
func (t *Tabulate) SetWrapStrings(wrap bool) {
	t.WrapStrings = wrap
}
|
||||
|
||||
// SetMaxCellSize - Sets the maximum size of cell
// If WrapStrings is set to true, then the string inside
// the cell will be split up into multiple cell
func (t *Tabulate) SetMaxCellSize(max int) {
	t.MaxSize = max
}
|
||||
|
||||
// SetDenseMode - Sets dense mode
// Under dense mode, no space line between rows
func (t *Tabulate) SetDenseMode() {
	t.DenseMode = true
}
|
||||
|
||||
func (t *Tabulate) splitElement(e string) (bool, string) {
|
||||
// check if we are not attempting to smartly wrap
|
||||
if t.WrapDelimiter == 0 {
|
||||
if t.SplitConcat == "" {
|
||||
return false, runewidth.Truncate(e, t.MaxSize, "")
|
||||
} else {
|
||||
return false, runewidth.Truncate(e, t.MaxSize, t.SplitConcat)
|
||||
}
|
||||
}
|
||||
|
||||
// we are attempting to wrap
|
||||
// grab the current width
|
||||
var i int
|
||||
for i = t.MaxSize; i > 1; i-- {
|
||||
// loop through our proposed truncation size looking for one that ends on
|
||||
// our requested delimiter
|
||||
x := runewidth.Truncate(e, i, "")
|
||||
// check if the NEXT string is a
|
||||
// delimiter, if it IS, then we truncate and tell the caller to shrink
|
||||
r, _ := utf8.DecodeRuneInString(e[i:])
|
||||
if r == 0 || r == 1 {
|
||||
// decode failed, take the truncation as is
|
||||
return false, x
|
||||
}
|
||||
if r == t.WrapDelimiter {
|
||||
return true, x // inform the caller that they can remove the next rune
|
||||
}
|
||||
}
|
||||
// didn't find a good length, truncate at will
|
||||
if t.SplitConcat != "" {
|
||||
return false, runewidth.Truncate(e, t.MaxSize, t.SplitConcat)
|
||||
}
|
||||
return false, runewidth.Truncate(e, t.MaxSize, "")
|
||||
}
|
||||
|
||||
// If string size is larger than t.MaxSize, then split it to multiple cells (downwards)
// Each over-wide cell keeps its first MaxSize-wide chunk (via splitElement)
// and pushes the remainder into a synthetic continuation row marked
// Continuos; the loop index is rewound so continuation rows are themselves
// re-checked until every cell fits.
func (t *Tabulate) wrapCellData() []*TabulateRow {
	var arr []*TabulateRow
	var cleanSplit bool
	var addr int
	if len(t.Data) == 0 {
		return arr
	}
	next := t.Data[0]
	// index deliberately runs to len(t.Data) inclusive: the final
	// iteration flushes the last (possibly continuation) row.
	for index := 0; index <= len(t.Data); index++ {
		elements := next.Elements
		new_elements := make([]string, len(elements))

		for i, e := range elements {
			if runewidth.StringWidth(e) > t.MaxSize {
				// Provisional hard truncation, immediately redone by
				// splitElement (which may instead cut on WrapDelimiter).
				elements[i] = runewidth.Truncate(e, t.MaxSize, "")
				cleanSplit, elements[i] = t.splitElement(e)
				if cleanSplit {
					// Clean split: skip the delimiter rune so it does not
					// lead the continuation cell.
					r, w := utf8.DecodeRuneInString(e[len(elements[i]):])
					if r != 0 && r != 1 {
						addr = w
					}
				} else {
					addr = 0
				}
				// Remainder of the cell becomes the continuation cell.
				new_elements[i] = e[len(elements[i])+addr:]
				next.Continuos = true
			}
		}

		if next.Continuos {
			// Emit this row, then re-process its continuation at the
			// same index (index-- cancels the loop's index++).
			arr = append(arr, next)
			next = &TabulateRow{Elements: new_elements}
			index--
		} else if index+1 < len(t.Data) {
			arr = append(arr, next)
			next = t.Data[index+1]
		} else if index >= len(t.Data) {
			// Flush the final row on the extra iteration.
			arr = append(arr, next)
		}

	}
	return arr
}
|
||||
|
||||
// Create - a new Tabulate Object
|
||||
// Accepts 2D String Array, 2D Int Array, 2D Int64 Array,
|
||||
// 2D Bool Array, 2D Float64 Array, 2D interface{} Array,
|
||||
// Map map[strig]string, Map map[string]interface{},
|
||||
func Create(data interface{}) *Tabulate {
|
||||
t := &Tabulate{FloatFormat: 'f', MaxSize: 30}
|
||||
|
||||
switch v := data.(type) {
|
||||
case [][]string:
|
||||
t.Data = createFromString(data.([][]string))
|
||||
case [][]int32:
|
||||
t.Data = createFromInt32(data.([][]int32))
|
||||
case [][]int64:
|
||||
t.Data = createFromInt64(data.([][]int64))
|
||||
case [][]int:
|
||||
t.Data = createFromInt(data.([][]int))
|
||||
case [][]bool:
|
||||
t.Data = createFromBool(data.([][]bool))
|
||||
case [][]float64:
|
||||
t.Data = createFromFloat64(data.([][]float64), t.FloatFormat)
|
||||
case [][]interface{}:
|
||||
t.Data = createFromMixed(data.([][]interface{}), t.FloatFormat)
|
||||
case []string:
|
||||
t.Data = createFromString([][]string{data.([]string)})
|
||||
case []interface{}:
|
||||
t.Data = createFromMixed([][]interface{}{data.([]interface{})}, t.FloatFormat)
|
||||
case map[string][]interface{}:
|
||||
t.Headers, t.Data = createFromMapMixed(data.(map[string][]interface{}), t.FloatFormat)
|
||||
case map[string][]string:
|
||||
t.Headers, t.Data = createFromMapString(data.(map[string][]string))
|
||||
default:
|
||||
fmt.Println(v)
|
||||
}
|
||||
|
||||
return t
|
||||
}
|
245
tablib/gotabulate/tabulate_test.go
Normal file
245
tablib/gotabulate/tabulate_test.go
Normal file
@ -0,0 +1,245 @@
|
||||
package gotabulate
|
||||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
// Shared fixtures used by the rendering tests below; rendered output is
// compared against the recorded files under _tests/.
var HEADERS = []string{"Header 1", "Header 2", "Header 3", "Header 4", "Header 5"}
var INT_ARRAY = []int{1, 2, 3, 1000, 200}
var INT64_ARRAY = []int64{100, 500, 600, 1000, 10000}
var FLOAT_ARRAY = []float64{10.01, 12.002, -123.5, 20.00005, 1.01}
var STRING_ARRAY = []string{"test string", "test string 2", "test", "row", "bndr"}
var MIXED_ARRAY = []interface{}{"string", 1, 1.005, "another string", -2}
var EMPTY_ARRAY = []string{"4th element empty", "4th element empty", "4th element empty"}
var MIXED_MAP = map[string][]interface{}{"header1": MIXED_ARRAY, "header2": MIXED_ARRAY}
|
||||
|
||||
// Test Setters
|
||||
func TestSetFormat(t *testing.T) {
|
||||
tabulate := Create([][]float64{FLOAT_ARRAY, FLOAT_ARRAY, FLOAT_ARRAY})
|
||||
tabulate.SetHeaders(HEADERS)
|
||||
tabulate.SetFloatFormat('f')
|
||||
assert.Equal(t, tabulate.Render("grid"), readTable("_tests/table_float"))
|
||||
}
|
||||
|
||||
// Test Align Left, Align Right, Align Center
|
||||
func TestPads(t *testing.T) {
|
||||
tabulate := Create([][]float64{FLOAT_ARRAY, FLOAT_ARRAY, FLOAT_ARRAY})
|
||||
tabulate.SetHeaders(HEADERS)
|
||||
tabulate.SetAlign("left")
|
||||
assert.Equal(t, tabulate.Render("grid"), readTable("_tests/table_float_right_pad"))
|
||||
tabulate.SetAlign("right")
|
||||
assert.Equal(t, tabulate.Render("grid"), readTable("_tests/table_float"))
|
||||
tabulate.SetAlign("center")
|
||||
assert.Equal(t, tabulate.Render("grid"), readTable("_tests/table_float_center_pad"))
|
||||
|
||||
}
|
||||
|
||||
func TestSetHeaders(t *testing.T) {
|
||||
tabulate := Create([][]string{STRING_ARRAY})
|
||||
tabulate.SetHeaders(HEADERS)
|
||||
assert.Equal(t, tabulate.Render("simple"), readTable("_tests/test_headers"))
|
||||
}
|
||||
|
||||
func TestHeadersFirstRow(t *testing.T) {
|
||||
tabulate := Create([][]string{STRING_ARRAY, STRING_ARRAY})
|
||||
assert.Equal(t, tabulate.Render("grid"), readTable("_tests/test_first_row"))
|
||||
}
|
||||
|
||||
func TestEmptyString(t *testing.T) {
|
||||
tabulate := Create([][]string{STRING_ARRAY, EMPTY_ARRAY})
|
||||
tabulate.SetHeaders(HEADERS)
|
||||
tabulate.SetEmptyString("None")
|
||||
assert.Equal(t, tabulate.Render("grid"), readTable("_tests/test_empty_element"))
|
||||
}
|
||||
|
||||
func TestMaxColWidth(t *testing.T) {
|
||||
// TODO
|
||||
}
|
||||
|
||||
func TestSingleColumn(t *testing.T) {
|
||||
tab := Create([][]string{
|
||||
{"test"},
|
||||
})
|
||||
tab.SetMaxCellSize(20)
|
||||
tab.SetWrapStrings(true)
|
||||
tab.Render("grid")
|
||||
}
|
||||
|
||||
// Test Simple
|
||||
func TestSimpleFloats(t *testing.T) {
|
||||
tabulate := Create([][]float64{FLOAT_ARRAY, FLOAT_ARRAY, FLOAT_ARRAY[:len(FLOAT_ARRAY)-1]})
|
||||
tabulate.SetHeaders(HEADERS)
|
||||
tabulate.SetEmptyString("None")
|
||||
assert.Equal(t, tabulate.Render("simple"), readTable("_tests/simple_floats"))
|
||||
}
|
||||
|
||||
func TestSimpleInts(t *testing.T) {
|
||||
tabulate := Create([][]int{INT_ARRAY, INT_ARRAY, INT_ARRAY[:len(INT_ARRAY)-1]})
|
||||
tabulate.SetHeaders(HEADERS)
|
||||
tabulate.SetEmptyString("None")
|
||||
assert.Equal(t, tabulate.Render("simple"), readTable("_tests/simple_ints"))
|
||||
}
|
||||
|
||||
func TestSimpleInts64(t *testing.T) {
|
||||
tabulate := Create([][]int64{INT64_ARRAY, INT64_ARRAY, INT64_ARRAY[:len(INT64_ARRAY)-1]})
|
||||
tabulate.SetHeaders(HEADERS)
|
||||
tabulate.SetEmptyString("None")
|
||||
assert.Equal(t, tabulate.Render("simple"), readTable("_tests/simple_ints64"))
|
||||
}
|
||||
|
||||
func TestSimpleString(t *testing.T) {
|
||||
tabulate := Create([][]string{STRING_ARRAY, STRING_ARRAY, EMPTY_ARRAY})
|
||||
tabulate.SetHeaders(HEADERS)
|
||||
tabulate.SetEmptyString("None")
|
||||
assert.Equal(t, tabulate.Render("simple"), readTable("_tests/simple_strings"))
|
||||
}
|
||||
|
||||
func TestSimpleMixed(t *testing.T) {
|
||||
tabulate := Create([][]interface{}{MIXED_ARRAY, MIXED_ARRAY})
|
||||
tabulate.SetHeaders(HEADERS)
|
||||
tabulate.SetEmptyString("None")
|
||||
assert.Equal(t, tabulate.Render("simple"), readTable("_tests/simple_mixed"))
|
||||
}
|
||||
|
||||
func TestSimpleMapMixed(t *testing.T) {
|
||||
tabulate := Create(MIXED_MAP)
|
||||
tabulate.SetHeaders(HEADERS)
|
||||
tabulate.SetEmptyString("None")
|
||||
assert.Equal(t, tabulate.Render("simple"), readTable("_tests/simple_map_mixed"))
|
||||
}
|
||||
|
||||
// Test Grid
|
||||
|
||||
func TestGridFloats(t *testing.T) {
|
||||
tabulate := Create([][]float64{FLOAT_ARRAY, FLOAT_ARRAY, FLOAT_ARRAY[:len(FLOAT_ARRAY)-1]})
|
||||
tabulate.SetHeaders(HEADERS)
|
||||
tabulate.SetEmptyString("None")
|
||||
assert.Equal(t, tabulate.Render("grid"), readTable("_tests/grid_floats"))
|
||||
}
|
||||
|
||||
func TestGridInts(t *testing.T) {
|
||||
tabulate := Create([][]int{INT_ARRAY, INT_ARRAY, INT_ARRAY[:len(INT_ARRAY)-1]})
|
||||
tabulate.SetHeaders(HEADERS)
|
||||
tabulate.SetEmptyString("None")
|
||||
assert.Equal(t, tabulate.Render("grid"), readTable("_tests/grid_ints"))
|
||||
}
|
||||
|
||||
func TestGridInts64(t *testing.T) {
|
||||
tabulate := Create([][]int64{INT64_ARRAY, INT64_ARRAY, INT64_ARRAY[:len(INT64_ARRAY)-1]})
|
||||
tabulate.SetHeaders(HEADERS)
|
||||
tabulate.SetEmptyString("None")
|
||||
assert.Equal(t, tabulate.Render("grid"), readTable("_tests/grid_ints64"))
|
||||
}
|
||||
|
||||
func TestGridString(t *testing.T) {
|
||||
tabulate := Create([][]string{STRING_ARRAY, STRING_ARRAY, EMPTY_ARRAY})
|
||||
tabulate.SetHeaders(HEADERS)
|
||||
tabulate.SetEmptyString("None")
|
||||
assert.Equal(t, tabulate.Render("grid"), readTable("_tests/grid_strings"))
|
||||
}
|
||||
|
||||
func TestGridMixed(t *testing.T) {
|
||||
tabulate := Create([][]interface{}{MIXED_ARRAY, MIXED_ARRAY})
|
||||
tabulate.SetHeaders(HEADERS)
|
||||
tabulate.SetEmptyString("None")
|
||||
assert.Equal(t, tabulate.Render("grid"), readTable("_tests/grid_mixed"))
|
||||
}
|
||||
|
||||
func TestGridMapMixed(t *testing.T) {
|
||||
tabulate := Create(MIXED_MAP)
|
||||
tabulate.SetHeaders(HEADERS)
|
||||
tabulate.SetEmptyString("None")
|
||||
assert.Equal(t, tabulate.Render("grid"), readTable("_tests/grid_map_mixed"))
|
||||
}
|
||||
|
||||
func TestPaddedHeader(t *testing.T) {
|
||||
tabulate := Create([][]string{STRING_ARRAY, STRING_ARRAY, EMPTY_ARRAY})
|
||||
tabulate.SetHeaders([]string{"Header 1", "header 2", "header 3", "header 4"})
|
||||
tabulate.SetEmptyString("None")
|
||||
assert.Equal(t, tabulate.Render("grid"), readTable("_tests/test_padded_headers"))
|
||||
}
|
||||
|
||||
func TestHideLineBelowHeader(t *testing.T) {
|
||||
tabulate := Create([][]string{STRING_ARRAY, STRING_ARRAY, EMPTY_ARRAY})
|
||||
tabulate.SetHeaders([]string{"Header 1", "header 2", "header 3", "header 4"})
|
||||
tabulate.SetHideLines([]string{"belowheader"})
|
||||
assert.Equal(t, tabulate.Render("grid"), readTable("_tests/test_hide_lines"))
|
||||
}
|
||||
func TestWrapCells(t *testing.T) {
|
||||
tabulate := Create([][]string{[]string{"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus laoreet vestibulum pretium. Nulla et ornare elit. Cum sociis natoque penatibus et magnis",
|
||||
"Vivamus laoreet vestibulum pretium. Nulla et ornare elit. Cum sociis natoque penatibus et magnis", "zzLorem ipsum", " test", "test"}, []string{"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus laoreet vestibulum pretium. Nulla et ornare elit. Cum sociis natoque penatibus et magnis",
|
||||
"Vivamus laoreet vestibulum pretium. Nulla et ornare elit. Cum sociis natoque penatibus et magnis", "zzLorem ipsum", " test", "test"}, STRING_ARRAY, []string{"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus laoreet vestibulum pretium. Nulla et ornare elit. Cum sociis natoque penatibus et magnis",
|
||||
"Vivamus laoreet vestibulum pretium. Nulla et ornare elit. Cum sociis natoque penatibus et magnis", "zzLorem ipsum", " test", "test"}, STRING_ARRAY})
|
||||
tabulate.SetHeaders([]string{"Header 1", "header 2", "header 3", "header 4"})
|
||||
tabulate.SetMaxCellSize(16)
|
||||
tabulate.SetWrapStrings(true)
|
||||
assert.Equal(t, tabulate.Render("grid"), readTable("_tests/test_string_wrap_grid"))
|
||||
}
|
||||
func TestWrapCellsSimple(t *testing.T) {
|
||||
tabulate := Create([][]string{[]string{"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus laoreet vestibulum pretium. Nulla et ornare elit. Cum sociis natoque penatibus et magnis",
|
||||
"Vivamus laoreet vestibulum pretium. Nulla et ornare elit. Cum sociis natoque penatibus et magnis", "zzLorem ipsum", " test", "test"}, []string{"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus laoreet vestibulum pretium. Nulla et ornare elit. Cum sociis natoque penatibus et magnis",
|
||||
"Vivamus laoreet vestibulum pretium. Nulla et ornare elit. Cum sociis natoque penatibus et magnis", "zzLorem ipsum", " test", "test"}, STRING_ARRAY, []string{"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus laoreet vestibulum pretium. Nulla et ornare elit. Cum sociis natoque penatibus et magnis",
|
||||
"Vivamus laoreet vestibulum pretium. Nulla et ornare elit. Cum sociis natoque penatibus et magnis", "zzLorem ipsum", " test", "test"}, STRING_ARRAY})
|
||||
tabulate.SetHeaders([]string{"Header 1", "header 2", "header 3", "header 4"})
|
||||
tabulate.SetMaxCellSize(16)
|
||||
tabulate.SetWrapStrings(true)
|
||||
assert.Equal(t, tabulate.Render("simple"), readTable("_tests/test_string_wrap_simple"))
|
||||
}
|
||||
func TestMultiByteString(t *testing.T) {
|
||||
tabulate := Create([][]string{
|
||||
{"朝", "おはようございます"},
|
||||
{"昼", "こんにちわ"},
|
||||
{"夜", "こんばんわ"},
|
||||
})
|
||||
tabulate.SetHeaders([]string{"時間帯", "挨拶"})
|
||||
tabulate.SetMaxCellSize(10)
|
||||
tabulate.SetWrapStrings(true)
|
||||
assert.Equal(t, tabulate.Render("grid"), readTable("_tests/test_multibyte_string"))
|
||||
}
|
||||
// readTable loads a rendered-table fixture from disk; a read failure aborts
// the test run via panic, which is acceptable for a test helper.
// NOTE(review): ioutil.ReadFile is deprecated since Go 1.16 in favour of
// os.ReadFile — confirm the module's minimum Go version before switching.
func readTable(path string) string {
	content, err := ioutil.ReadFile(path)
	if err != nil {
		panic(err)
	}
	return string(content)
}
|
||||
|
||||
func TestSplitCell(t *testing.T) {
|
||||
tab := Create([][]string{
|
||||
{"header", "value"},
|
||||
{"test1", "This is a really long string, yaaaay it works, Vivamus laoreet vestibulum pretium. Nulla et ornare elit. Cum sociis natoque penatibus et magnis Vivamus laoreet vestibulum pretium. Nulla et ornare elit. Cum sociis natoque penatibus et magnis"},
|
||||
{"test2", "AAAAAAAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCCCCCCCCCCCCEEEEEEEEEEEEEEEEEEEEEDDDDDDDDDDDDDDd"},
|
||||
})
|
||||
tab.SetMaxCellSize(20)
|
||||
tab.SetWrapStrings(true)
|
||||
tab.SetWrapDelimiter(' ')
|
||||
tab.SetSplitConcat("-")
|
||||
assert.Equal(t, tab.Render("grid"), readTable("_tests/smart_wrap"))
|
||||
}
|
||||
|
||||
func TestTitlesGrid(t *testing.T) {
|
||||
tabulate := Create([][]string{STRING_ARRAY, STRING_ARRAY, EMPTY_ARRAY})
|
||||
tabulate.SetTitle("Title One", "center")
|
||||
tabulate.SetHeaders(HEADERS)
|
||||
tabulate.SetEmptyString("None")
|
||||
assert.Equal(t, tabulate.Render("grid"), readTable("_tests/grid_strings_titled"))
|
||||
}
|
||||
|
||||
func TestTitlesPlain(t *testing.T) {
|
||||
tabulate := Create([][]string{STRING_ARRAY, STRING_ARRAY, EMPTY_ARRAY})
|
||||
tabulate.SetTitle("Make Titles Great Again", "left")
|
||||
tabulate.SetHeaders(HEADERS)
|
||||
tabulate.SetEmptyString("None")
|
||||
assert.Equal(t, tabulate.Render("plain"), readTable("_tests/plain_strings_titled"))
|
||||
}
|
||||
|
||||
func TestTitlesSimple(t *testing.T) {
|
||||
tabulate := Create([][]string{STRING_ARRAY, STRING_ARRAY, EMPTY_ARRAY})
|
||||
tabulate.SetTitle("Simple Title", "right")
|
||||
tabulate.SetHeaders(HEADERS)
|
||||
tabulate.SetEmptyString("None")
|
||||
assert.Equal(t, tabulate.Render("simple"), readTable("_tests/simple_strings_titled"))
|
||||
}
|
144
tablib/gotabulate/utils.go
Normal file
144
tablib/gotabulate/utils.go
Normal file
@ -0,0 +1,144 @@
|
||||
package gotabulate
|
||||
|
||||
import "strconv"
|
||||
import "fmt"
|
||||
|
||||
// Create normalized Array from strings
|
||||
func createFromString(data [][]string) []*TabulateRow {
|
||||
rows := make([]*TabulateRow, len(data))
|
||||
|
||||
for index, el := range data {
|
||||
rows[index] = &TabulateRow{Elements: el}
|
||||
}
|
||||
return rows
|
||||
}
|
||||
|
||||
// Create normalized array of rows from mixed data (interface{})
|
||||
func createFromMixed(data [][]interface{}, format byte) []*TabulateRow {
|
||||
rows := make([]*TabulateRow, len(data))
|
||||
for index_1, element := range data {
|
||||
normalized := make([]string, len(element))
|
||||
for index, el := range element {
|
||||
switch el.(type) {
|
||||
case int32:
|
||||
quoted := strconv.QuoteRuneToASCII(el.(int32))
|
||||
normalized[index] = quoted[1 : len(quoted)-1]
|
||||
case int:
|
||||
normalized[index] = strconv.Itoa(el.(int))
|
||||
case int64:
|
||||
normalized[index] = strconv.FormatInt(el.(int64), 10)
|
||||
case bool:
|
||||
normalized[index] = strconv.FormatBool(el.(bool))
|
||||
case float64:
|
||||
normalized[index] = strconv.FormatFloat(el.(float64), format, -1, 64)
|
||||
case uint64:
|
||||
normalized[index] = strconv.FormatUint(el.(uint64), 10)
|
||||
case nil:
|
||||
normalized[index] = "nil"
|
||||
default:
|
||||
normalized[index] = fmt.Sprintf("%s", el)
|
||||
}
|
||||
}
|
||||
rows[index_1] = &TabulateRow{Elements: normalized}
|
||||
}
|
||||
return rows
|
||||
}
|
||||
|
||||
// Create normalized array from ints
|
||||
func createFromInt(data [][]int) []*TabulateRow {
|
||||
rows := make([]*TabulateRow, len(data))
|
||||
for index_1, arr := range data {
|
||||
row := make([]string, len(arr))
|
||||
for index, el := range arr {
|
||||
row[index] = strconv.Itoa(el)
|
||||
}
|
||||
rows[index_1] = &TabulateRow{Elements: row}
|
||||
}
|
||||
return rows
|
||||
}
|
||||
|
||||
// Create normalized array from float64
|
||||
func createFromFloat64(data [][]float64, format byte) []*TabulateRow {
|
||||
rows := make([]*TabulateRow, len(data))
|
||||
for index_1, arr := range data {
|
||||
row := make([]string, len(arr))
|
||||
for index, el := range arr {
|
||||
row[index] = strconv.FormatFloat(el, format, -1, 64)
|
||||
}
|
||||
rows[index_1] = &TabulateRow{Elements: row}
|
||||
}
|
||||
return rows
|
||||
}
|
||||
|
||||
// Create normalized array from ints32
|
||||
func createFromInt32(data [][]int32) []*TabulateRow {
|
||||
rows := make([]*TabulateRow, len(data))
|
||||
for index_1, arr := range data {
|
||||
row := make([]string, len(arr))
|
||||
for index, el := range arr {
|
||||
quoted := strconv.QuoteRuneToASCII(el)
|
||||
row[index] = quoted[1 : len(quoted)-1]
|
||||
}
|
||||
rows[index_1] = &TabulateRow{Elements: row}
|
||||
}
|
||||
return rows
|
||||
}
|
||||
|
||||
// Create normalized array from ints64
|
||||
func createFromInt64(data [][]int64) []*TabulateRow {
|
||||
rows := make([]*TabulateRow, len(data))
|
||||
for index_1, arr := range data {
|
||||
row := make([]string, len(arr))
|
||||
for index, el := range arr {
|
||||
row[index] = strconv.FormatInt(el, 10)
|
||||
}
|
||||
rows[index_1] = &TabulateRow{Elements: row}
|
||||
}
|
||||
return rows
|
||||
}
|
||||
|
||||
// Create normalized array from bools
|
||||
func createFromBool(data [][]bool) []*TabulateRow {
|
||||
rows := make([]*TabulateRow, len(data))
|
||||
for index_1, arr := range data {
|
||||
row := make([]string, len(arr))
|
||||
for index, el := range arr {
|
||||
row[index] = strconv.FormatBool(el)
|
||||
}
|
||||
rows[index_1] = &TabulateRow{Elements: row}
|
||||
}
|
||||
return rows
|
||||
}
|
||||
|
||||
// Create normalized array from a map of mixed elements (interface{})
|
||||
// Keys will be used as header
|
||||
func createFromMapMixed(data map[string][]interface{}, format byte) (headers []string, tData []*TabulateRow) {
|
||||
|
||||
var dataslice [][]interface{}
|
||||
for key, value := range data {
|
||||
headers = append(headers, key)
|
||||
dataslice = append(dataslice, value)
|
||||
}
|
||||
return headers, createFromMixed(dataslice, format)
|
||||
}
|
||||
|
||||
// Create normalized array from Map of strings
|
||||
// Keys will be used as header
|
||||
func createFromMapString(data map[string][]string) (headers []string, tData []*TabulateRow) {
|
||||
var dataslice [][]string
|
||||
for key, value := range data {
|
||||
headers = append(headers, key)
|
||||
dataslice = append(dataslice, value)
|
||||
}
|
||||
return headers, createFromString(dataslice)
|
||||
}
|
||||
|
||||
// inSlice reports whether the string a occurs in list.
func inSlice(a string, list []string) bool {
	for _, candidate := range list {
		if candidate == a {
			return true
		}
	}
	return false
}
|
81
tablib/tablib_csv.go
Normal file
81
tablib/tablib_csv.go
Normal file
@ -0,0 +1,81 @@
|
||||
package tablib
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/csv"
|
||||
)
|
||||
|
||||
// CSV returns a CSV representation of the Dataset an Exportable.
|
||||
func (d *Dataset) CSV() (*Exportable, error) {
|
||||
records := d.Records()
|
||||
b := newBuffer()
|
||||
|
||||
w := csv.NewWriter(b)
|
||||
w.WriteAll(records) // calls Flush internally
|
||||
|
||||
if err := w.Error(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return newExportable(b), nil
|
||||
}
|
||||
|
||||
// LoadCSV loads a Dataset by its CSV representation.
|
||||
func LoadCSV(input []byte) (*Dataset, error) {
|
||||
reader := csv.NewReader(bytes.NewReader(input))
|
||||
records, err := reader.ReadAll()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ds := NewDataset(records[0])
|
||||
for i := 1; i < len(records); i++ {
|
||||
// this is odd
|
||||
row := make([]interface{}, len(records[i]))
|
||||
for k, v := range records[i] {
|
||||
row[k] = v
|
||||
}
|
||||
ds.Append(row)
|
||||
}
|
||||
|
||||
return ds, nil
|
||||
}
|
||||
|
||||
// TSV returns a TSV representation of the Dataset as string.
|
||||
func (d *Dataset) TSV() (*Exportable, error) {
|
||||
records := d.Records()
|
||||
b := newBuffer()
|
||||
|
||||
w := csv.NewWriter(b)
|
||||
w.Comma = '\t'
|
||||
w.WriteAll(records) // calls Flush internally
|
||||
|
||||
if err := w.Error(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return newExportable(b), nil
|
||||
}
|
||||
|
||||
// LoadTSV loads a Dataset by its TSV representation.
|
||||
func LoadTSV(input []byte) (*Dataset, error) {
|
||||
reader := csv.NewReader(bytes.NewReader(input))
|
||||
reader.Comma = '\t'
|
||||
|
||||
records, err := reader.ReadAll()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ds := NewDataset(records[0])
|
||||
for i := 1; i < len(records); i++ {
|
||||
// this is odd
|
||||
row := make([]interface{}, len(records[i]))
|
||||
for k, v := range records[i] {
|
||||
row[k] = v
|
||||
}
|
||||
ds.Append(row)
|
||||
}
|
||||
|
||||
return ds, nil
|
||||
}
|
54
tablib/tablib_databook.go
Normal file
54
tablib/tablib_databook.go
Normal file
@ -0,0 +1,54 @@
|
||||
package tablib
|
||||
|
||||
// Sheet represents a sheet in a Databook, holding a title (if any) and a dataset.
|
||||
type Sheet struct {
|
||||
title string
|
||||
dataset *Dataset
|
||||
}
|
||||
|
||||
// Title return the title of the sheet.
|
||||
func (s Sheet) Title() string {
|
||||
return s.title
|
||||
}
|
||||
|
||||
// Dataset returns the dataset of the sheet.
|
||||
func (s Sheet) Dataset() *Dataset {
|
||||
return s.dataset
|
||||
}
|
||||
|
||||
// Databook represents a Databook which is an array of sheets.
|
||||
type Databook struct {
|
||||
sheets map[string]Sheet
|
||||
}
|
||||
|
||||
// NewDatabook constructs a new Databook.
|
||||
func NewDatabook() *Databook {
|
||||
return &Databook{make(map[string]Sheet)}
|
||||
}
|
||||
|
||||
// Sheets returns the sheets in the Databook.
|
||||
func (d *Databook) Sheets() map[string]Sheet {
|
||||
return d.sheets
|
||||
}
|
||||
|
||||
// Sheet returns the sheet with a specific title.
|
||||
func (d *Databook) Sheet(title string) Sheet {
|
||||
return d.sheets[title]
|
||||
}
|
||||
|
||||
// AddSheet adds a sheet to the Databook.
|
||||
func (d *Databook) AddSheet(title string, dataset *Dataset) {
|
||||
d.sheets[title] = Sheet{title, dataset}
|
||||
}
|
||||
|
||||
// Size returns the number of sheets in the Databook.
|
||||
func (d *Databook) Size() int {
|
||||
return len(d.sheets)
|
||||
}
|
||||
|
||||
// Wipe removes all Dataset objects from the Databook.
|
||||
func (d *Databook) Wipe() {
|
||||
for k := range d.sheets {
|
||||
delete(d.sheets, k)
|
||||
}
|
||||
}
|
828
tablib/tablib_dataset.go
Normal file
828
tablib/tablib_dataset.go
Normal file
@ -0,0 +1,828 @@
|
||||
// Package tablib is a format-agnostic tabular Dataset library, written in Go.
|
||||
// It allows you to import, export, and manipulate tabular data sets.
|
||||
// Advanced features include, dynamic columns, tags & filtering, and seamless format import & export.
|
||||
package tablib
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sort"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Dataset represents a set of data, which is a list of data and header for each column.
|
||||
type Dataset struct {
|
||||
// EmptyValue represents the string value to b output if a field cannot be
|
||||
// formatted as a string during output of certain formats.
|
||||
EmptyValue string
|
||||
headers []string
|
||||
data [][]interface{}
|
||||
tags [][]string
|
||||
constraints []ColumnConstraint
|
||||
rows int
|
||||
cols int
|
||||
ValidationErrors []ValidationError
|
||||
|
||||
Align string
|
||||
EmptyString string
|
||||
FloatFormat byte
|
||||
MaxCellSize int
|
||||
WrapDelimiter rune
|
||||
WrapStrings bool
|
||||
DenseMode bool
|
||||
SplitConcat string
|
||||
}
|
||||
|
||||
// Alignment values accepted by SetAlign; they mirror gotabulate's pads.
const (
	AlignLeft  = "left"
	AlignRight = "right"
	// AlignMiddle is the correctly spelled name for centre alignment.
	AlignMiddle = "center"
	// AlighMiddle is kept for backward compatibility with existing callers.
	//
	// Deprecated: use AlignMiddle instead.
	AlighMiddle = "center"
)
|
||||
|
||||
// SetAlign - Mirrors the gotabulate function
|
||||
func (d *Dataset) SetAlign(align string) {
|
||||
d.Align = align
|
||||
}
|
||||
|
||||
// SetWrapStrings - Mirrors the gotabulate function
|
||||
func (d *Dataset) SetWrapStrings(wrap bool) {
|
||||
d.WrapStrings = wrap
|
||||
}
|
||||
|
||||
// SetEmptyString - Mirrors the gotabulate function
|
||||
func (d *Dataset) SetEmptyString(empty string) {
|
||||
d.EmptyString = empty
|
||||
}
|
||||
|
||||
// SetFloatFormat - Mirrors the gotabulate function
|
||||
func (d *Dataset) SetFloatFormat(format byte) {
|
||||
d.FloatFormat = format
|
||||
}
|
||||
|
||||
// SetMaxCellSize - Mirrors the gotabulate function
|
||||
func (d *Dataset) SetMaxCellSize(max int) {
|
||||
d.MaxCellSize = max
|
||||
}
|
||||
|
||||
// SetWrapDelimiter - Mirrors the gotabulate function
|
||||
func (d *Dataset) SetWrapDelimiter(delim rune) {
|
||||
d.WrapDelimiter = delim
|
||||
}
|
||||
|
||||
// SetDenseMode - Mirrors the gotabulate function
|
||||
func (d *Dataset) SetDenseMode(mode bool) {
|
||||
d.DenseMode = mode
|
||||
}
|
||||
|
||||
// SetSplitConcat - Mirrors the gotabulate function
|
||||
func (d *Dataset) SetSplitConcat(split string) {
|
||||
d.SplitConcat = split
|
||||
}
|
||||
|
||||
|
||||
// DynamicColumn computes a cell from the row it belongs to; it is evaluated
// lazily when the Dataset is exported to a predefined format.
type DynamicColumn func([]interface{}) interface{}

// ColumnConstraint validates a single cell value of the column it is bound
// to, returning true when the value is acceptable.
type ColumnConstraint func(interface{}) bool

// ValidationError records the position (row, column) of a cell that failed
// its column constraint.
type ValidationError struct {
	Row    int
	Column int
}
|
||||
|
||||
// NewDataset creates a new Dataset.
|
||||
func NewDataset(headers []string) *Dataset {
|
||||
return NewDatasetWithData(headers, nil)
|
||||
}
|
||||
|
||||
// NewDatasetWithData creates a new Dataset.
|
||||
func NewDatasetWithData(headers []string, data [][]interface{}) *Dataset {
|
||||
d := &Dataset {
|
||||
EmptyValue: "",
|
||||
headers: headers,
|
||||
data: data,
|
||||
tags: make([][]string, 0),
|
||||
constraints: make([]ColumnConstraint, len(headers)),
|
||||
rows: len(data),
|
||||
cols: len(headers),
|
||||
ValidationErrors: nil,
|
||||
|
||||
// Defaults for gotabulate
|
||||
Align: AlignRight,
|
||||
EmptyString: "",
|
||||
FloatFormat: 'f',
|
||||
MaxCellSize: 30,
|
||||
WrapDelimiter: 0,
|
||||
WrapStrings: false,
|
||||
DenseMode: false,
|
||||
SplitConcat: "",
|
||||
}
|
||||
return d
|
||||
}
|
||||
|
||||
// Headers return the headers of the Dataset.
|
||||
func (d *Dataset) Headers() []string {
|
||||
return d.headers
|
||||
}
|
||||
|
||||
// Width returns the number of columns in the Dataset.
|
||||
func (d *Dataset) Width() int {
|
||||
return d.cols
|
||||
}
|
||||
|
||||
// Height returns the number of rows in the Dataset.
|
||||
func (d *Dataset) Height() int {
|
||||
return d.rows
|
||||
}
|
||||
|
||||
// Append appends a row of values to the Dataset.
|
||||
func (d *Dataset) Append(row []interface{}) error {
|
||||
if len(row) != d.cols {
|
||||
return ErrInvalidDimensions
|
||||
}
|
||||
d.data = append(d.data, row)
|
||||
d.tags = append(d.tags, make([]string, 0))
|
||||
d.rows++
|
||||
return nil
|
||||
}
|
||||
|
||||
// AppendTagged appends a row of values to the Dataset with one or multiple tags
|
||||
// for filtering purposes.
|
||||
func (d *Dataset) AppendTagged(row []interface{}, tags ...string) error {
|
||||
if err := d.Append(row); err != nil {
|
||||
return err
|
||||
}
|
||||
d.tags[d.rows-1] = tags[:]
|
||||
return nil
|
||||
}
|
||||
|
||||
// AppendValues appends a row of values to the Dataset.
|
||||
func (d *Dataset) AppendValues(row ...interface{}) error {
|
||||
return d.Append(row[:])
|
||||
}
|
||||
|
||||
// AppendValuesTagged appends a row of values to the Dataset with one or multiple tags
|
||||
// for filtering purposes.
|
||||
func (d *Dataset) AppendValuesTagged(row ...interface{}) error {
|
||||
if len(row) < d.cols {
|
||||
return ErrInvalidDimensions
|
||||
}
|
||||
var tags []string
|
||||
for _, tag := range row[d.cols:] {
|
||||
if tagStr, ok := tag.(string); ok {
|
||||
tags = append(tags, tagStr)
|
||||
} else {
|
||||
return ErrInvalidTag
|
||||
}
|
||||
}
|
||||
return d.AppendTagged(row[:d.cols], tags...)
|
||||
}
|
||||
|
||||
// Insert inserts a row at a given index.
|
||||
func (d *Dataset) Insert(index int, row []interface{}) error {
|
||||
if index < 0 || index >= d.rows {
|
||||
return ErrInvalidRowIndex
|
||||
}
|
||||
|
||||
if len(row) != d.cols {
|
||||
return ErrInvalidDimensions
|
||||
}
|
||||
|
||||
ndata := make([][]interface{}, 0, d.rows+1)
|
||||
ndata = append(ndata, d.data[:index]...)
|
||||
ndata = append(ndata, row)
|
||||
ndata = append(ndata, d.data[index:]...)
|
||||
d.data = ndata
|
||||
d.rows++
|
||||
|
||||
ntags := make([][]string, 0, d.rows+1)
|
||||
ntags = append(ntags, d.tags[:index]...)
|
||||
ntags = append(ntags, make([]string, 0))
|
||||
ntags = append(ntags, d.tags[index:]...)
|
||||
d.tags = ntags
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// InsertValues inserts a row of values at a given index.
|
||||
func (d *Dataset) InsertValues(index int, values ...interface{}) error {
|
||||
return d.Insert(index, values[:])
|
||||
}
|
||||
|
||||
// InsertTagged inserts a row at a given index with specific tags.
|
||||
func (d *Dataset) InsertTagged(index int, row []interface{}, tags ...string) error {
|
||||
if err := d.Insert(index, row); err != nil {
|
||||
return err
|
||||
}
|
||||
d.Insert(index, row)
|
||||
d.tags[index] = tags[:]
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Tag tags a row at a given index with specific tags.
|
||||
// Returns ErrInvalidRowIndex if the row does not exist.
|
||||
func (d *Dataset) Tag(index int, tags ...string) error {
|
||||
if index < 0 || index >= d.rows {
|
||||
return ErrInvalidRowIndex
|
||||
}
|
||||
|
||||
for _, tag := range tags {
|
||||
if !isTagged(tag, d.tags[index]) {
|
||||
d.tags[index] = append(d.tags[index], tag)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Tags returns the tags of a row at a given index.
|
||||
// Returns ErrInvalidRowIndex if the row does not exist.
|
||||
func (d *Dataset) Tags(index int) ([]string, error) {
|
||||
if index < 0 || index >= d.rows {
|
||||
return nil, ErrInvalidRowIndex
|
||||
}
|
||||
|
||||
return d.tags[index], nil
|
||||
}
|
||||
|
||||
// AppendColumn appends a new column with values to the Dataset.
|
||||
func (d *Dataset) AppendColumn(header string, cols []interface{}) error {
|
||||
if len(cols) != d.rows {
|
||||
return ErrInvalidDimensions
|
||||
}
|
||||
d.headers = append(d.headers, header)
|
||||
d.constraints = append(d.constraints, nil) // no constraint by default
|
||||
d.cols++
|
||||
for i, e := range d.data {
|
||||
d.data[i] = append(e, cols[i])
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// AppendConstrainedColumn appends a constrained column to the Dataset.
|
||||
func (d *Dataset) AppendConstrainedColumn(header string, constraint ColumnConstraint, cols []interface{}) error {
|
||||
err := d.AppendColumn(header, cols)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
d.constraints[d.cols-1] = constraint
|
||||
return nil
|
||||
}
|
||||
|
||||
// AppendColumnValues appends a new column with values to the Dataset.
|
||||
func (d *Dataset) AppendColumnValues(header string, cols ...interface{}) error {
|
||||
return d.AppendColumn(header, cols[:])
|
||||
}
|
||||
|
||||
// AppendDynamicColumn appends a dynamic column to the Dataset.
|
||||
func (d *Dataset) AppendDynamicColumn(header string, fn DynamicColumn) {
|
||||
d.headers = append(d.headers, header)
|
||||
d.constraints = append(d.constraints, nil)
|
||||
d.cols++
|
||||
for i, e := range d.data {
|
||||
d.data[i] = append(e, fn)
|
||||
}
|
||||
}
|
||||
|
||||
// ConstrainColumn adds a constraint to a column in the Dataset.
|
||||
func (d *Dataset) ConstrainColumn(header string, constraint ColumnConstraint) {
|
||||
i := indexOfColumn(header, d)
|
||||
if i != -1 {
|
||||
d.constraints[i] = constraint
|
||||
}
|
||||
}
|
||||
|
||||
// InsertColumn insert a new column at a given index.
|
||||
func (d *Dataset) InsertColumn(index int, header string, cols []interface{}) error {
|
||||
if index < 0 || index >= d.cols {
|
||||
return ErrInvalidColumnIndex
|
||||
}
|
||||
|
||||
if len(cols) != d.rows {
|
||||
return ErrInvalidDimensions
|
||||
}
|
||||
|
||||
d.insertHeader(index, header)
|
||||
|
||||
// for each row, insert the column
|
||||
for i, r := range d.data {
|
||||
row := make([]interface{}, 0, d.cols)
|
||||
row = append(row, r[:index]...)
|
||||
row = append(row, cols[i])
|
||||
row = append(row, r[index:]...)
|
||||
d.data[i] = row
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// InsertDynamicColumn insert a new dynamic column at a given index.
|
||||
func (d *Dataset) InsertDynamicColumn(index int, header string, fn DynamicColumn) error {
|
||||
if index < 0 || index >= d.cols {
|
||||
return ErrInvalidColumnIndex
|
||||
}
|
||||
|
||||
d.insertHeader(index, header)
|
||||
|
||||
// for each row, insert the column
|
||||
for i, r := range d.data {
|
||||
row := make([]interface{}, 0, d.cols)
|
||||
row = append(row, r[:index]...)
|
||||
row = append(row, fn)
|
||||
row = append(row, r[index:]...)
|
||||
d.data[i] = row
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// InsertConstrainedColumn insert a new constrained column at a given index.
|
||||
func (d *Dataset) InsertConstrainedColumn(index int, header string,
|
||||
constraint ColumnConstraint, cols []interface{}) error {
|
||||
err := d.InsertColumn(index, header, cols)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
d.constraints[index] = constraint
|
||||
return nil
|
||||
}
|
||||
|
||||
// insertHeader inserts a header at a specific index.
|
||||
func (d *Dataset) insertHeader(index int, header string) {
|
||||
headers := make([]string, 0, d.cols+1)
|
||||
headers = append(headers, d.headers[:index]...)
|
||||
headers = append(headers, header)
|
||||
headers = append(headers, d.headers[index:]...)
|
||||
d.headers = headers
|
||||
|
||||
constraints := make([]ColumnConstraint, 0, d.cols+1)
|
||||
constraints = append(constraints, d.constraints[:index]...)
|
||||
constraints = append(constraints, nil)
|
||||
constraints = append(constraints, d.constraints[index:]...)
|
||||
d.constraints = constraints
|
||||
|
||||
d.cols++
|
||||
}
|
||||
|
||||
// ValidFailFast returns whether the Dataset is valid regarding constraints that have
|
||||
// been previously set on columns.
|
||||
func (d *Dataset) ValidFailFast() bool {
|
||||
valid := true
|
||||
for column, constraint := range d.constraints {
|
||||
if constraint != nil {
|
||||
for row, val := range d.Column(d.headers[column]) {
|
||||
cellIsValid := true
|
||||
|
||||
switch val.(type) {
|
||||
case DynamicColumn:
|
||||
cellIsValid = constraint((val.(DynamicColumn))(d.data[row]))
|
||||
default:
|
||||
cellIsValid = constraint(val)
|
||||
}
|
||||
|
||||
if !cellIsValid {
|
||||
valid = false
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if valid {
|
||||
d.ValidationErrors = make([]ValidationError, 0)
|
||||
}
|
||||
|
||||
return valid
|
||||
}
|
||||
|
||||
// Valid returns whether the Dataset is valid regarding constraints that have
|
||||
// been previously set on columns.
|
||||
// Its behaviour is different of ValidFailFast in a sense that it will validate the whole
|
||||
// Dataset and all the validation errors will be available by using Dataset.ValidationErrors
|
||||
func (d *Dataset) Valid() bool {
|
||||
d.ValidationErrors = make([]ValidationError, 0)
|
||||
|
||||
valid := true
|
||||
for column, constraint := range d.constraints {
|
||||
if constraint != nil {
|
||||
for row, val := range d.Column(d.headers[column]) {
|
||||
cellIsValid := true
|
||||
|
||||
switch val.(type) {
|
||||
case DynamicColumn:
|
||||
cellIsValid = constraint((val.(DynamicColumn))(d.data[row]))
|
||||
default:
|
||||
cellIsValid = constraint(val)
|
||||
}
|
||||
|
||||
if !cellIsValid {
|
||||
d.ValidationErrors = append(d.ValidationErrors,
|
||||
ValidationError{Row: row, Column: column})
|
||||
valid = false
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return valid
|
||||
}
|
||||
|
||||
// HasAnyConstraint returns whether the Dataset has any constraint set.
|
||||
func (d *Dataset) HasAnyConstraint() bool {
|
||||
hasConstraint := false
|
||||
for _, constraint := range d.constraints {
|
||||
if constraint != nil {
|
||||
hasConstraint = true
|
||||
break
|
||||
}
|
||||
}
|
||||
return hasConstraint
|
||||
}
|
||||
|
||||
// ValidSubset return a new Dataset containing only the rows validating their
|
||||
// constraints. This is similar to what Filter() does with tags, but with constraints.
|
||||
// If no constraints are set, it returns the same instance.
|
||||
// Note: The returned Dataset is free of any constraints, tags are conserved.
|
||||
func (d *Dataset) ValidSubset() *Dataset {
|
||||
return d.internalValidSubset(true)
|
||||
}
|
||||
|
||||
// InvalidSubset return a new Dataset containing only the rows failing to validate their
|
||||
// constraints.
|
||||
// If no constraints are set, it returns the same instance.
|
||||
// Note: The returned Dataset is free of any constraints, tags are conserved.
|
||||
func (d *Dataset) InvalidSubset() *Dataset {
|
||||
return d.internalValidSubset(false)
|
||||
}
|
||||
|
||||
// internalValidSubset return a new Dataset containing only the rows validating their
|
||||
// constraints or not depending on its parameter `valid`.
|
||||
func (d *Dataset) internalValidSubset(valid bool) *Dataset {
|
||||
if !d.HasAnyConstraint() {
|
||||
return d
|
||||
}
|
||||
|
||||
nd := NewDataset(d.headers)
|
||||
nd.data = make([][]interface{}, 0)
|
||||
ndRowIndex := 0
|
||||
nd.tags = make([][]string, 0)
|
||||
|
||||
for i, row := range d.data {
|
||||
keep := true
|
||||
for j, val := range d.data[i] {
|
||||
if d.constraints[j] != nil {
|
||||
switch val.(type) {
|
||||
case DynamicColumn:
|
||||
if valid {
|
||||
keep = d.constraints[j]((val.(DynamicColumn))(row))
|
||||
} else {
|
||||
keep = !d.constraints[j]((val.(DynamicColumn))(row))
|
||||
}
|
||||
default:
|
||||
if valid {
|
||||
keep = d.constraints[j](val)
|
||||
} else {
|
||||
keep = !d.constraints[j](val)
|
||||
}
|
||||
}
|
||||
}
|
||||
if valid && !keep {
|
||||
break
|
||||
}
|
||||
}
|
||||
if keep {
|
||||
nd.data = append(nd.data, make([]interface{}, 0, nd.cols))
|
||||
nd.data[ndRowIndex] = append(nd.data[ndRowIndex], row...)
|
||||
|
||||
nd.tags = append(nd.tags, make([]string, 0, nd.cols))
|
||||
nd.tags[ndRowIndex] = append(nd.tags[ndRowIndex], d.tags[i]...)
|
||||
ndRowIndex++
|
||||
}
|
||||
}
|
||||
nd.cols = d.cols
|
||||
nd.rows = ndRowIndex
|
||||
|
||||
return nd
|
||||
}
|
||||
|
||||
// Stack stacks two Dataset by joining at the row level, and return new combined Dataset.
|
||||
func (d *Dataset) Stack(other *Dataset) (*Dataset, error) {
|
||||
if d.Width() != other.Width() {
|
||||
return nil, ErrInvalidDimensions
|
||||
}
|
||||
|
||||
nd := NewDataset(d.headers)
|
||||
nd.cols = d.cols
|
||||
nd.rows = d.rows + other.rows
|
||||
|
||||
nd.tags = make([][]string, 0, nd.rows)
|
||||
nd.tags = append(nd.tags, d.tags...)
|
||||
nd.tags = append(nd.tags, other.tags...)
|
||||
|
||||
nd.data = make([][]interface{}, 0, nd.rows)
|
||||
nd.data = append(nd.data, d.data...)
|
||||
nd.data = append(nd.data, other.data...)
|
||||
|
||||
return nd, nil
|
||||
}
|
||||
|
||||
// StackColumn stacks two Dataset by joining them at the column level, and return new combined Dataset.
|
||||
func (d *Dataset) StackColumn(other *Dataset) (*Dataset, error) {
|
||||
if d.Height() != other.Height() {
|
||||
return nil, ErrInvalidDimensions
|
||||
}
|
||||
|
||||
nheaders := d.headers
|
||||
nheaders = append(nheaders, other.headers...)
|
||||
|
||||
nd := NewDataset(nheaders)
|
||||
nd.cols = d.cols + nd.cols
|
||||
nd.rows = d.rows
|
||||
nd.data = make([][]interface{}, nd.rows, nd.rows)
|
||||
nd.tags = make([][]string, nd.rows, nd.rows)
|
||||
|
||||
for i := range d.data {
|
||||
nd.data[i] = make([]interface{}, 0, nd.cols)
|
||||
nd.data[i] = append(nd.data[i], d.data[i]...)
|
||||
nd.data[i] = append(nd.data[i], other.data[i]...)
|
||||
|
||||
nd.tags[i] = make([]string, 0, nd.cols)
|
||||
nd.tags[i] = append(nd.tags[i], d.tags[i]...)
|
||||
nd.tags[i] = append(nd.tags[i], other.tags[i]...)
|
||||
}
|
||||
|
||||
return nd, nil
|
||||
}
|
||||
|
||||
// Column returns all the values for a specific column
|
||||
// returns nil if column is not found.
|
||||
func (d *Dataset) Column(header string) []interface{} {
|
||||
colIndex := indexOfColumn(header, d)
|
||||
if colIndex == -1 {
|
||||
return nil
|
||||
}
|
||||
|
||||
values := make([]interface{}, d.rows)
|
||||
for i, e := range d.data {
|
||||
switch e[colIndex].(type) {
|
||||
case DynamicColumn:
|
||||
values[i] = e[colIndex].(DynamicColumn)(e)
|
||||
default:
|
||||
values[i] = e[colIndex]
|
||||
}
|
||||
}
|
||||
return values
|
||||
}
|
||||
|
||||
// Row returns a map representing a specific row of the Dataset.
|
||||
// returns tablib.ErrInvalidRowIndex if the row cannot be found
|
||||
func (d *Dataset) Row(index int) (map[string]interface{}, error) {
|
||||
if index < 0 || index >= d.rows {
|
||||
return nil, ErrInvalidRowIndex
|
||||
}
|
||||
|
||||
row := make(map[string]interface{})
|
||||
for i, e := range d.data[index] {
|
||||
switch e.(type) {
|
||||
case DynamicColumn:
|
||||
row[d.headers[i]] = e.(DynamicColumn)(d.data[index])
|
||||
default:
|
||||
row[d.headers[i]] = e
|
||||
}
|
||||
}
|
||||
return row, nil
|
||||
}
|
||||
|
||||
// Rows returns an array of map representing a set of specific rows of the Dataset.
|
||||
// returns tablib.ErrInvalidRowIndex if the row cannot be found.
|
||||
func (d *Dataset) Rows(index ...int) ([]map[string]interface{}, error) {
|
||||
for _, i := range index {
|
||||
if i < 0 || i >= d.rows {
|
||||
return nil, ErrInvalidRowIndex
|
||||
}
|
||||
}
|
||||
|
||||
rows := make([]map[string]interface{}, 0, len(index))
|
||||
for _, i := range index {
|
||||
row, _ := d.Row(i)
|
||||
rows = append(rows, row)
|
||||
}
|
||||
|
||||
return rows, nil
|
||||
}
|
||||
|
||||
// Slice returns a new Dataset representing a slice of the orignal Dataset like a slice of an array.
|
||||
// returns tablib.ErrInvalidRowIndex if the lower or upper bound is out of range.
|
||||
func (d *Dataset) Slice(lower, upperNonInclusive int) (*Dataset, error) {
|
||||
if lower > upperNonInclusive || lower < 0 || upperNonInclusive > d.rows {
|
||||
return nil, ErrInvalidRowIndex
|
||||
}
|
||||
|
||||
rowCount := upperNonInclusive - lower
|
||||
cols := d.cols
|
||||
nd := NewDataset(d.headers)
|
||||
nd.data = make([][]interface{}, 0, rowCount)
|
||||
nd.tags = make([][]string, 0, rowCount)
|
||||
nd.rows = upperNonInclusive - lower
|
||||
j := 0
|
||||
for i := lower; i < upperNonInclusive; i++ {
|
||||
nd.data = append(nd.data, make([]interface{}, 0, cols))
|
||||
nd.data[j] = make([]interface{}, 0, cols)
|
||||
nd.data[j] = append(nd.data[j], d.data[i]...)
|
||||
nd.tags = append(nd.tags, make([]string, 0, cols))
|
||||
nd.tags[j] = make([]string, 0, cols)
|
||||
nd.tags[j] = append(nd.tags[j], d.tags[i]...)
|
||||
j++
|
||||
}
|
||||
|
||||
return nd, nil
|
||||
}
|
||||
|
||||
// Filter filters a Dataset, returning a fresh Dataset including only the rows
|
||||
// previously tagged with one of the given tags. Returns a new Dataset.
|
||||
func (d *Dataset) Filter(tags ...string) *Dataset {
|
||||
nd := NewDataset(d.headers)
|
||||
for rowIndex, rowValue := range d.data {
|
||||
for _, filterTag := range tags {
|
||||
if isTagged(filterTag, d.tags[rowIndex]) {
|
||||
nd.AppendTagged(rowValue, d.tags[rowIndex]...) // copy tags
|
||||
}
|
||||
}
|
||||
}
|
||||
return nd
|
||||
}
|
||||
|
||||
// Sort sorts the Dataset by a specific column. Returns a new Dataset.
|
||||
func (d *Dataset) Sort(column string) *Dataset {
|
||||
return d.internalSort(column, false)
|
||||
}
|
||||
|
||||
// SortReverse sorts the Dataset by a specific column in reverse order. Returns a new Dataset.
|
||||
func (d *Dataset) SortReverse(column string) *Dataset {
|
||||
return d.internalSort(column, true)
|
||||
}
|
||||
|
||||
func (d *Dataset) internalSort(column string, reverse bool) *Dataset {
|
||||
nd := NewDataset(d.headers)
|
||||
nd.Align = d.Align
|
||||
nd.EmptyString = d.EmptyString
|
||||
nd.FloatFormat = d.FloatFormat
|
||||
nd.MaxCellSize = d.MaxCellSize
|
||||
nd.WrapDelimiter = d.WrapDelimiter
|
||||
nd.WrapStrings = d.WrapStrings
|
||||
nd.DenseMode = d.DenseMode
|
||||
nd.SplitConcat = d.SplitConcat
|
||||
|
||||
pairs := make([]entryPair, 0, nd.rows)
|
||||
for i, v := range d.Column(column) {
|
||||
pairs = append(pairs, entryPair{i, v})
|
||||
}
|
||||
|
||||
var how sort.Interface
|
||||
// sort by column
|
||||
switch pairs[0].value.(type) {
|
||||
case string:
|
||||
how = byStringValue(pairs)
|
||||
case int:
|
||||
how = byIntValue(pairs)
|
||||
case int64:
|
||||
how = byInt64Value(pairs)
|
||||
case uint64:
|
||||
how = byUint64Value(pairs)
|
||||
case float64:
|
||||
how = byFloatValue(pairs)
|
||||
case time.Time:
|
||||
how = byTimeValue(pairs)
|
||||
default:
|
||||
// nothing
|
||||
}
|
||||
|
||||
if !reverse {
|
||||
sort.Sort(how)
|
||||
} else {
|
||||
sort.Sort(sort.Reverse(how))
|
||||
}
|
||||
|
||||
// now iterate on the pairs and add the data sorted to the new Dataset
|
||||
for _, p := range pairs {
|
||||
nd.AppendTagged(d.data[p.index], d.tags[p.index]...)
|
||||
}
|
||||
|
||||
return nd
|
||||
}
|
||||
|
||||
// Transpose transposes a Dataset, turning rows into columns and vice versa,
|
||||
// returning a new Dataset instance. The first row of the original instance
|
||||
// becomes the new header row. Tags, constraints and dynamic columns are lost
|
||||
// in the returned Dataset.
|
||||
// TODO
|
||||
func (d *Dataset) Transpose() *Dataset {
|
||||
newHeaders := make([]string, 0, d.cols+1)
|
||||
newHeaders = append(newHeaders, d.headers[0])
|
||||
for _, c := range d.Column(d.headers[0]) {
|
||||
newHeaders = append(newHeaders, d.asString(c))
|
||||
}
|
||||
|
||||
nd := NewDataset(newHeaders)
|
||||
nd.data = make([][]interface{}, 0, d.cols)
|
||||
for i := 1; i < d.cols; i++ {
|
||||
nd.data = append(nd.data, make([]interface{}, 0, d.rows))
|
||||
nd.data[i-1] = make([]interface{}, 0, d.rows)
|
||||
nd.data[i-1] = append(nd.data[i-1], d.headers[i])
|
||||
nd.data[i-1] = append(nd.data[i-1], d.Column(d.headers[i])...)
|
||||
}
|
||||
nd.rows = d.cols - 1
|
||||
|
||||
return nd
|
||||
}
|
||||
|
||||
// DeleteRow deletes a row at a specific index
|
||||
func (d *Dataset) DeleteRow(row int) error {
|
||||
if row < 0 || row >= d.rows {
|
||||
return ErrInvalidRowIndex
|
||||
}
|
||||
d.data = append(d.data[:row], d.data[row+1:]...)
|
||||
d.rows--
|
||||
return nil
|
||||
}
|
||||
|
||||
// DeleteColumn deletes a column from the Dataset.
|
||||
func (d *Dataset) DeleteColumn(header string) error {
|
||||
colIndex := indexOfColumn(header, d)
|
||||
if colIndex == -1 {
|
||||
return ErrInvalidColumnIndex
|
||||
}
|
||||
d.cols--
|
||||
d.headers = append(d.headers[:colIndex], d.headers[colIndex+1:]...)
|
||||
// remove the column
|
||||
for i := range d.data {
|
||||
d.data[i] = append(d.data[i][:colIndex], d.data[i][colIndex+1:]...)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func indexOfColumn(header string, d *Dataset) int {
|
||||
for i, e := range d.headers {
|
||||
if e == header {
|
||||
return i
|
||||
}
|
||||
}
|
||||
return -1
|
||||
}
|
||||
|
||||
// Dict returns the Dataset as an array of map where each key is a column.
|
||||
func (d *Dataset) Dict() []interface{} {
|
||||
back := make([]interface{}, d.rows)
|
||||
for i, e := range d.data {
|
||||
m := make(map[string]interface{}, d.cols-1)
|
||||
for j, c := range d.headers {
|
||||
switch e[j].(type) {
|
||||
case DynamicColumn:
|
||||
m[c] = e[j].(DynamicColumn)(e)
|
||||
default:
|
||||
m[c] = e[j]
|
||||
}
|
||||
}
|
||||
back[i] = m
|
||||
}
|
||||
return back
|
||||
}
|
||||
|
||||
// Records returns the Dataset as an array of array where each entry is a string.
|
||||
// The first row of the returned 2d array represents the columns of the Dataset.
|
||||
func (d *Dataset) Records() [][]string {
|
||||
records := make([][]string, d.rows+1 /* +1 for header */)
|
||||
records[0] = make([]string, d.cols)
|
||||
for j, e := range d.headers {
|
||||
records[0][j] = e
|
||||
}
|
||||
for i, e := range d.data {
|
||||
rowIndex := i + 1
|
||||
j := 0
|
||||
records[rowIndex] = make([]string, d.cols)
|
||||
for _, v := range e {
|
||||
vv := v
|
||||
switch v.(type) {
|
||||
case DynamicColumn:
|
||||
vv = v.(DynamicColumn)(e)
|
||||
default:
|
||||
// nothing
|
||||
}
|
||||
records[rowIndex][j] = d.asString(vv)
|
||||
j++
|
||||
}
|
||||
}
|
||||
|
||||
return records
|
||||
}
|
||||
|
||||
// justLetMeKeepFmt exists only to keep the fmt import referenced.
func justLetMeKeepFmt() {
	fmt.Print("")
}
|
20
tablib/tablib_errors.go
Normal file
20
tablib/tablib_errors.go
Normal file
@ -0,0 +1,20 @@
|
||||
package tablib
|
||||
|
||||
import "errors"
|
||||
|
||||
// Sentinel errors returned by the tablib API; compare with == (or errors.Is).
var (
	// ErrInvalidDimensions is returned when trying to append/insert too much
	// or not enough values to a row or column.
	ErrInvalidDimensions = errors.New("tablib: Invalid dimension")
	// ErrInvalidColumnIndex is returned when trying to insert a column at an
	// invalid index.
	ErrInvalidColumnIndex = errors.New("tablib: Invalid column index")
	// ErrInvalidRowIndex is returned when trying to insert a row at an
	// invalid index.
	ErrInvalidRowIndex = errors.New("tablib: Invalid row index")
	// ErrInvalidDataset is returned when trying to validate a Dataset against
	// the constraints that have been set on its columns.
	ErrInvalidDataset = errors.New("tablib: Invalid dataset")
	// ErrInvalidTag is returned when trying to add a tag which is not a string.
	ErrInvalidTag = errors.New("tablib: A tag must be a string")
)
|
70
tablib/tablib_exportable.go
Normal file
70
tablib/tablib_exportable.go
Normal file
@ -0,0 +1,70 @@
|
||||
package tablib
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
)
|
||||
|
||||
// defaultBufferCap is the initial capacity used by newBuffer.
const defaultBufferCap = 16 * 1024

// newBuffer returns a new bytes.Buffer instance already initialized
// with an underlying bytes array of the capacity equal to defaultBufferCap.
func newBuffer() *bytes.Buffer {
	return newBufferWithCap(defaultBufferCap)
}

// newBufferWithCap returns a new bytes.Buffer instance already initialized
// with an underlying bytes array of the given capacity.
func newBufferWithCap(initialCap int) *bytes.Buffer {
	return bytes.NewBuffer(make([]byte, 0, initialCap))
}

// Exportable represents an exportable dataset, it cannot be manipulated at this point
// and it can just be converted to a string, []byte or written to a io.Writer.
// The exportable struct just holds a bytes.Buffer that is used by the tablib library
// to write export formats content. Real work is delegated to bytes.Buffer.
type Exportable struct {
	buffer *bytes.Buffer
}

// newExportable creates a new instance of Exportable from a bytes.Buffer.
func newExportable(buffer *bytes.Buffer) *Exportable {
	return &Exportable{buffer: buffer}
}

// newExportableFromBytes creates a new instance of Exportable from a byte array.
func newExportableFromBytes(buf []byte) *Exportable {
	return newExportable(bytes.NewBuffer(buf))
}

// newExportableFromString creates a new instance of Exportable from a string.
func newExportableFromString(str string) *Exportable {
	b := newBufferWithCap(len(str))
	b.WriteString(str)
	return newExportable(b)
}

// Bytes returns the contents of the exported dataset as a byte array.
func (e *Exportable) Bytes() []byte {
	return e.buffer.Bytes()
}

// String returns the contents of the exported dataset as a string.
func (e *Exportable) String() string {
	return e.buffer.String()
}

// WriteTo writes the exported dataset to w.
func (e *Exportable) WriteTo(w io.Writer) (int64, error) {
	return e.buffer.WriteTo(w)
}

// WriteFile writes the databook or dataset content to a file named by filename.
// If the file does not exist, WriteFile creates it with permissions perm;
// otherwise WriteFile truncates it before writing.
func (e *Exportable) WriteFile(filename string, perm os.FileMode) error {
	return ioutil.WriteFile(filename, e.Bytes(), perm)
}
|
41
tablib/tablib_html.go
Normal file
41
tablib/tablib_html.go
Normal file
@ -0,0 +1,41 @@
|
||||
package tablib
|
||||
|
||||
// HTML returns the HTML representation of the Dataset as an Exportable.
|
||||
func (d *Dataset) HTML() *Exportable {
|
||||
back := d.Records()
|
||||
b := newBuffer()
|
||||
|
||||
b.WriteString("<table class=\"table table-striped\">\n\t<thead>")
|
||||
for i, r := range back {
|
||||
b.WriteString("\n\t\t<tr>")
|
||||
for _, c := range r {
|
||||
tag := "td"
|
||||
if i == 0 {
|
||||
tag = "th"
|
||||
}
|
||||
b.WriteString("\n\t\t\t<" + tag + ">")
|
||||
b.WriteString(c)
|
||||
b.WriteString("</" + tag + ">")
|
||||
}
|
||||
b.WriteString("\n\t\t</tr>")
|
||||
if i == 0 {
|
||||
b.WriteString("\n\t</thead>\n\t<tbody>")
|
||||
}
|
||||
}
|
||||
b.WriteString("\n\t</tbody>\n</table>")
|
||||
|
||||
return newExportable(b)
|
||||
}
|
||||
|
||||
// HTML returns a HTML representation of the Databook as an Exportable.
|
||||
func (d *Databook) HTML() *Exportable {
|
||||
b := newBuffer()
|
||||
|
||||
for _, s := range d.sheets {
|
||||
b.WriteString("<h1>" + s.title + "</h1>\n")
|
||||
b.Write(s.dataset.HTML().Bytes())
|
||||
b.WriteString("\n\n")
|
||||
}
|
||||
|
||||
return newExportable(b)
|
||||
}
|
72
tablib/tablib_json.go
Normal file
72
tablib/tablib_json.go
Normal file
@ -0,0 +1,72 @@
|
||||
package tablib
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
)
|
||||
|
||||
// LoadJSON loads a dataset from a JSON source (the previous comment said
// "YAML source", which was wrong). The input must be a JSON array of
// objects, each object mapping column names to values.
func LoadJSON(jsonContent []byte) (*Dataset, error) {
	var input []map[string]interface{}
	if err := json.Unmarshal(jsonContent, &input); err != nil {
		return nil, err
	}

	return internalLoadFromDict(input)
}
|
||||
|
||||
// LoadDatabookJSON loads a Databook from a JSON source.
|
||||
func LoadDatabookJSON(jsonContent []byte) (*Databook, error) {
|
||||
var input []map[string]interface{}
|
||||
var internalInput []map[string]interface{}
|
||||
if err := json.Unmarshal(jsonContent, &input); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
db := NewDatabook()
|
||||
for _, d := range input {
|
||||
b, err := json.Marshal(d["data"])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := json.Unmarshal(b, &internalInput); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if ds, err := internalLoadFromDict(internalInput); err == nil {
|
||||
db.AddSheet(d["title"].(string), ds)
|
||||
} else {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return db, nil
|
||||
}
|
||||
|
||||
// JSON returns a JSON representation of the Dataset as an Exportable.
|
||||
func (d *Dataset) JSON() (*Exportable, error) {
|
||||
back := d.Dict()
|
||||
|
||||
b, err := json.Marshal(back)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return newExportableFromBytes(b), nil
|
||||
}
|
||||
|
||||
// JSON returns a JSON representation of the Databook as an Exportable.
|
||||
func (d *Databook) JSON() (*Exportable, error) {
|
||||
b := newBuffer()
|
||||
b.WriteString("[")
|
||||
for _, s := range d.sheets {
|
||||
b.WriteString("{\"title\": \"" + s.title + "\", \"data\": ")
|
||||
js, err := s.dataset.JSON()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
b.Write(js.Bytes())
|
||||
b.WriteString("},")
|
||||
}
|
||||
by := b.Bytes()
|
||||
by[len(by)-1] = ']'
|
||||
return newExportableFromBytes(by), nil
|
||||
}
|
48
tablib/tablib_sort.go
Normal file
48
tablib/tablib_sort.go
Normal file
@ -0,0 +1,48 @@
|
||||
package tablib
|
||||
|
||||
import "time"
|
||||
|
||||
// entryPair couples a cell value with the index of the row it came from;
// the sort.Interface adapters below order slices of these pairs by value.
type entryPair struct {
	index int
	value interface{}
}

// byIntValue orders entryPairs by their int value.
type byIntValue []entryPair

func (s byIntValue) Len() int      { return len(s) }
func (s byIntValue) Swap(a, b int) { s[a], s[b] = s[b], s[a] }
func (s byIntValue) Less(a, b int) bool {
	return s[a].value.(int) < s[b].value.(int)
}

// byInt64Value orders entryPairs by their int64 value.
type byInt64Value []entryPair

func (s byInt64Value) Len() int      { return len(s) }
func (s byInt64Value) Swap(a, b int) { s[a], s[b] = s[b], s[a] }
func (s byInt64Value) Less(a, b int) bool {
	return s[a].value.(int64) < s[b].value.(int64)
}

// byUint64Value orders entryPairs by their uint64 value.
type byUint64Value []entryPair

func (s byUint64Value) Len() int      { return len(s) }
func (s byUint64Value) Swap(a, b int) { s[a], s[b] = s[b], s[a] }
func (s byUint64Value) Less(a, b int) bool {
	return s[a].value.(uint64) < s[b].value.(uint64)
}

// byFloatValue orders entryPairs by their float64 value.
type byFloatValue []entryPair

func (s byFloatValue) Len() int      { return len(s) }
func (s byFloatValue) Swap(a, b int) { s[a], s[b] = s[b], s[a] }
func (s byFloatValue) Less(a, b int) bool {
	return s[a].value.(float64) < s[b].value.(float64)
}

// byTimeValue orders entryPairs chronologically by their time.Time value.
type byTimeValue []entryPair

func (s byTimeValue) Len() int      { return len(s) }
func (s byTimeValue) Swap(a, b int) { s[a], s[b] = s[b], s[a] }
func (s byTimeValue) Less(a, b int) bool {
	return s[a].value.(time.Time).UnixNano() < s[b].value.(time.Time).UnixNano()
}

// byStringValue orders entryPairs lexicographically by their string value.
type byStringValue []entryPair

func (s byStringValue) Len() int      { return len(s) }
func (s byStringValue) Swap(a, b int) { s[a], s[b] = s[b], s[a] }
func (s byStringValue) Less(a, b int) bool {
	return s[a].value.(string) < s[b].value.(string)
}
|
149
tablib/tablib_sql.go
Normal file
149
tablib/tablib_sql.go
Normal file
@ -0,0 +1,149 @@
|
||||
package tablib
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
var (
	// typePostgres and typeMySQL select the SQL dialect used by sql().
	typePostgres = "postgres"
	typeMySQL    = "mysql"
	// defaults maps "<kind>.<dialect>" to the fallback SQL column type:
	// "various" is used for mixed-type columns, "numeric" for numeric ones.
	defaults = map[string]string{"various." + typePostgres: "TEXT",
		"various." + typeMySQL: "VARCHAR(100)", "numeric." + typePostgres: "NUMERIC",
		"numeric." + typeMySQL: "DOUBLE"}
)
|
||||
|
||||
// columnSQLType determines the type of a column
// if throughout the whole column values have the same type then this type is
// returned, otherwise the VARCHAR/TEXT type is returned.
// numeric types are coerced into DOUBLE/NUMERIC
// It returns the chosen SQL type together with the column's (dynamic-resolved) values.
func (d *Dataset) columnSQLType(header, dbType string) (string, []interface{}) {
	types := 0        // incremented on every change of value kind (numeric/time/string)
	currentType := "" // kind of the most recent value
	maxString := 0    // longest string seen, used to size VARCHAR(n)
	values := d.Column(header)
	for _, c := range values {
		switch c.(type) {
		case uint, uint8, uint16, uint32, uint64,
			int, int8, int16, int32, int64,
			float32, float64:
			if currentType != "numeric" {
				currentType = "numeric"
				types++
			}
		case time.Time:
			if currentType != "time" {
				currentType = "time"
				types++
			}
		case string:
			if currentType != "string" {
				currentType = "string"
				types++
			}
			if len(c.(string)) > maxString {
				maxString = len(c.(string))
			}
		}
	}

	// More than one kind change: fall back to the dialect's catch-all type.
	if types > 1 {
		return defaults["various."+dbType], values
	}
	switch currentType {
	case "numeric":
		return defaults["numeric."+dbType], values
	case "time":
		return "TIMESTAMP", values
	default:
		// Strings (or an empty/unrecognized column): Postgres uses TEXT,
		// MySQL a VARCHAR sized to the longest value seen.
		if dbType == typePostgres {
			return "TEXT", values
		}
		return "VARCHAR(" + strconv.Itoa(maxString) + ")", values
	}
}
|
||||
|
||||
// isStringColumn returns whether a column is VARCHAR/TEXT
func isStringColumn(c string) bool {
	if strings.HasPrefix(c, "VARCHAR") {
		return true
	}
	return strings.HasPrefix(c, "TEXT")
}
|
||||
|
||||
// MySQL returns a string representing a suite of MySQL commands
// recreating the Dataset into a table.
// The generated table carries an AUTO_INCREMENT INT primary key named id.
func (d *Dataset) MySQL(table string) *Exportable {
	return d.sql(table, typeMySQL)
}
|
||||
|
||||
// Postgres returns a string representing a suite of Postgres commands
// recreating the Dataset into a table.
// The generated table carries a SERIAL primary key named id.
func (d *Dataset) Postgres(table string) *Exportable {
	return d.sql(table, typePostgres)
}
|
||||
|
||||
// sql returns a string representing a suite of SQL commands
// recreating the Dataset into a table.
func (d *Dataset) sql(table, dbType string) *Exportable {
	b := newBuffer()

	// Emit the CREATE TABLE statement and collect, per header, the SQL
	// type chosen for the column and its resolved values.
	tableSQL, columnTypes, columnValues := d.createTable(table, dbType)
	b.WriteString(tableSQL)

	// Matches single quotes so they can be doubled for SQL escaping.
	// NOTE(review): recompiled on every call; could be hoisted to a
	// package-level regexp.MustCompile.
	reg, _ := regexp.Compile("[']")
	// inserts
	for i := range d.data {
		// The synthetic id column is 1-based.
		b.WriteString("INSERT INTO " + table + " VALUES(" + strconv.Itoa(i+1) + ", ")
		for j, col := range d.headers {
			asStr := d.asString(columnValues[col][i])
			if isStringColumn(columnTypes[col]) {
				// Escape embedded single quotes by doubling them.
				b.WriteString("'" + reg.ReplaceAllString(asStr, "''") + "'")
			} else if strings.HasPrefix(columnTypes[col], "TIMESTAMP") {
				if dbType == typeMySQL {
					// MySQL: convert from the string's trailing zone
					// offset to the server zone. Assumes asString emits
					// an RFC3339-like layout — TODO confirm.
					b.WriteString("CONVERT_TZ('" + asStr[:10] + " " + asStr[11:19] + "', '" + asStr[len(asStr)-6:] + "', 'SYSTEM')")
				} else {
					b.WriteString("'" + asStr + "'") // simpler with Postgres
				}
			} else {
				b.WriteString(asStr)
			}
			if j < len(d.headers)-1 {
				b.WriteString(", ")
			}
		}
		b.WriteString(");\n")
	}
	b.WriteString("\nCOMMIT;\n")

	return newExportable(b)
}
|
||||
|
||||
// createTable builds the CREATE TABLE statement for the Dataset and
// returns it along with two maps keyed by header: the SQL type chosen
// for each column and the column's resolved values.
func (d *Dataset) createTable(table, dbType string) (string, map[string]string, map[string][]interface{}) {
	var b bytes.Buffer
	columnValues := make(map[string][]interface{})
	columnTypes := make(map[string]string)

	// create table
	b.WriteString("CREATE TABLE IF NOT EXISTS " + table)
	// Both dialects get a synthetic auto-incrementing id primary key.
	if dbType == typePostgres {
		b.WriteString("\n(\n\tid SERIAL PRIMARY KEY,\n")
	} else {
		b.WriteString("\n(\n\tid INT NOT NULL AUTO_INCREMENT PRIMARY KEY,\n")
	}
	for i, h := range d.headers {
		b.WriteString("\t" + h)
		// Pick the column type from the data and memoize the resolved
		// values so sql() does not re-evaluate dynamic columns.
		t, v := d.columnSQLType(h, dbType)
		columnValues[h] = v
		columnTypes[h] = t
		b.WriteString(" " + t)
		if i < len(d.headers)-1 {
			b.WriteString(",")
		}
		b.WriteString("\n")
	}

	b.WriteString(");\n\n")

	return b.String(), columnTypes, columnValues
}
|
88
tablib/tablib_tabular.go
Normal file
88
tablib/tablib_tabular.go
Normal file
@ -0,0 +1,88 @@
|
||||
package tablib
|
||||
|
||||
import (
|
||||
"GoSungrow/tablib/gotabulate"
|
||||
"regexp"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
var (
	// TabularGrid is the value to be passed to gotabulate to render the table
	// as ASCII table with grid format
	TabularGrid = "grid"
	// TabularSimple is the value to be passed to gotabulate to render the table
	// as ASCII table with simple format
	TabularSimple = "simple"
	// TabularCondensed is the value to be passed to gotabulate to render the table
	// as ASCII table with condensed format
	TabularCondensed = "condensed"
	// TabularMarkdown is the value to be passed to gotabulate to render the table
	// as ASCII table with Markdown format
	TabularMarkdown = "markdown"

	// TabularUtf8 selects the "utf8" gotabulate format (presumably UTF-8
	// box-drawing borders — confirm against the vendored gotabulate).
	TabularUtf8 = "utf8"
	// TabularMickMake selects this fork's custom "mickmake" gotabulate format.
	TabularMickMake = "mickmake"
)
|
||||
|
||||
// Markdown returns a Markdown table Exportable representation of the Dataset.
// It is shorthand for Tabular(TabularMarkdown).
func (d *Dataset) Markdown() *Exportable {
	return d.Tabular(TabularMarkdown)
}
|
||||
|
||||
// Tabular returns a tabular Exportable representation of the Dataset.
// format is either grid, simple, condensed or markdown.
func (d *Dataset) Tabular(format string) *Exportable {
	back := d.Records()
	t := gotabulate.Create(back)
	// Forward the Dataset's rendering preferences to gotabulate.
	t.SetAlign(d.Align)
	t.SetEmptyString(d.EmptyString)
	t.SetFloatFormat(d.FloatFormat)
	if d.MaxCellSize == 0 {
		// Default cell width when the caller did not set one. Note this
		// mutates the receiver as a side effect.
		d.MaxCellSize = 30
	}
	t.SetMaxCellSize(d.MaxCellSize)
	t.SetWrapDelimiter(d.WrapDelimiter)
	t.SetWrapStrings(d.WrapStrings)
	if d.DenseMode {
		t.SetDenseMode()
	}
	t.SetSplitConcat(d.SplitConcat)

	if format == TabularCondensed || format == TabularMarkdown || format == TabularUtf8 || format == TabularMickMake {
		// Collapse the blank-line runs these formats emit.
		// NOTE(review): both regexps below are recompiled on every call;
		// they could be hoisted to package level.
		rendered := regexp.MustCompile("\n\n\\s").ReplaceAllString(t.Render(format), "\n ")
		if format == TabularMarkdown {
			// Rebuild the first (separator) line with pipes, then record
			// the byte offset of every pipe so the data lines can be
			// re-cut at the same columns below.
			firstLine := regexp.MustCompile("-\\s+-").ReplaceAllString(strings.Split(rendered, "\n")[0], "- | -")
			// now just locate the position of pipe characters, and set them
			positions := make([]int, 0, d.cols-1)
			x := 0
			for _, c := range firstLine {
				if c == '|' {
					positions = append(positions, x)
				}
				// Track byte (not rune) offsets, since lines are sliced
				// by byte index below.
				x += utf8.RuneLen(c)
			}

			b := newBuffer()
			lines := strings.Split(rendered, "\n")
			// Skip the separator line and the trailing two lines; emit
			// each remaining line as a pipe-delimited Markdown row.
			for _, line := range lines[1 : len(lines)-2] {
				ipos := 0
				b.WriteString("| ")
				for _, pos := range positions {
					if ipos < len(line) && pos < len(line) {
						b.WriteString(line[ipos:pos])
						b.WriteString(" | ")
						ipos = pos + 1
					}
				}
				if ipos < len(line) {
					b.WriteString(line[ipos:])
				}
				b.WriteString(" | \n")
			}
			return newExportable(b)
		}
		return newExportableFromString(rendered)
	}
	return newExportableFromString(t.Render(format))
}
|
736
tablib/tablib_test.go
Normal file
736
tablib/tablib_test.go
Normal file
@ -0,0 +1,736 @@
|
||||
package tablib_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/base64"
|
||||
"testing"
|
||||
|
||||
tablib "github.com/agrison/go-tablib"
|
||||
. "gopkg.in/check.v1"
|
||||
)
|
||||
|
||||
// Test hooks the gocheck suites into the standard "go test" runner.
func Test(t *testing.T) { TestingT(t) }
|
||||
|
||||
// TablibSuite groups all tablib tests under a single gocheck suite.
type TablibSuite struct{}

// Register the suite with gocheck so Test discovers it.
var _ = Suite(&TablibSuite{})
|
||||
|
||||
func presidentDataset() *tablib.Dataset {
|
||||
ds := tablib.NewDataset([]string{"firstName", "lastName", "gpa"})
|
||||
ds.AppendValues("John", "Adams", 90)
|
||||
ds.AppendValues("George", "Washington", 67)
|
||||
ds.AppendValues("Thomas", "Jefferson", 50)
|
||||
return ds
|
||||
}
|
||||
|
||||
func presidentDatasetWithTags() *tablib.Dataset {
|
||||
ds := tablib.NewDataset([]string{"firstName", "lastName", "gpa"})
|
||||
ds.AppendTagged([]interface{}{"John", "Adams", 90}, "Massachusetts")
|
||||
ds.AppendTagged([]interface{}{"George", "Washington", 67}, "Virginia")
|
||||
ds.AppendTagged([]interface{}{"Thomas", "Jefferson", 50}, "Virginia")
|
||||
return ds
|
||||
}
|
||||
|
||||
func frenchPresidentDataset() *tablib.Dataset {
|
||||
ds := tablib.NewDataset([]string{"firstName", "lastName", "gpa"})
|
||||
ds.AppendValues("Jacques", "Chirac", 88)
|
||||
ds.AppendValues("Nicolas", "Sarkozy", 98)
|
||||
ds.AppendValues("François", "Hollande", 34)
|
||||
return ds
|
||||
}
|
||||
|
||||
func frenchPresidentAdditionalDataset() *tablib.Dataset {
|
||||
ds := tablib.NewDataset([]string{"duration", "from"})
|
||||
ds.AppendValues(14, "Paris")
|
||||
ds.AppendValues(12, "Paris")
|
||||
ds.AppendValues(5, "Rouen")
|
||||
return ds
|
||||
}
|
||||
|
||||
func carDataset() *tablib.Dataset {
|
||||
ds := tablib.NewDataset([]string{"Maker", "Model", "Year"})
|
||||
ds.AppendValues("Porsche", "991", 2012)
|
||||
ds.AppendValues("Skoda", "Octavia", 2011)
|
||||
ds.AppendValues("Ferrari", "458", 2009)
|
||||
ds.AppendValues("Citroen", "Picasso II", 2013)
|
||||
ds.AppendValues("Bentley", "Continental GT", 2003)
|
||||
return ds
|
||||
}
|
||||
|
||||
func validRowAt(d *tablib.Dataset, index int) map[string]interface{} {
|
||||
row, _ := d.Row(index)
|
||||
return row
|
||||
}
|
||||
|
||||
func lastRow(d *tablib.Dataset) map[string]interface{} {
|
||||
row, _ := d.Row(d.Height() - 1)
|
||||
return row
|
||||
}
|
||||
|
||||
// TestDimensions verifies Width, Height and Headers on the president fixture.
func (s *TablibSuite) TestDimensions(c *C) {
	ds := presidentDataset()
	c.Assert(ds.Width(), Equals, 3)
	c.Assert(ds.Height(), Equals, 3)
	c.Assert(ds.Headers(), DeepEquals, []string{"firstName", "lastName", "gpa"})
}
|
||||
|
||||
// TestAppendRow checks dimension validation and value storage of AppendValues.
func (s *TablibSuite) TestAppendRow(c *C) {
	ds := presidentDataset()
	// too many columns
	c.Assert(ds.AppendValues("a", "b", 50, "d"), Equals, tablib.ErrInvalidDimensions)
	// not enough columns
	c.Assert(ds.AppendValues("a", "b"), Equals, tablib.ErrInvalidDimensions)
	// ok
	c.Assert(ds.AppendValues("foo", "bar", 42), Equals, nil)
	// test values are there
	d := lastRow(ds)
	c.Assert(d["firstName"], Equals, "foo")
	c.Assert(d["lastName"], Equals, "bar")
	c.Assert(d["gpa"], Equals, 42)
}
|
||||
|
||||
// TestAppendColumn checks row-count validation and value storage of
// AppendColumnValues.
func (s *TablibSuite) TestAppendColumn(c *C) {
	ds := presidentDataset()
	// too many rows
	c.Assert(ds.AppendColumnValues("foo", "a", "b", "c", "d"), Equals, tablib.ErrInvalidDimensions)
	// not enough rows
	c.Assert(ds.AppendColumnValues("foo", "a", "b"), Equals, tablib.ErrInvalidDimensions)
	// ok
	c.Assert(ds.AppendColumnValues("foo", "a", "b", "c"), Equals, nil)
	// test values are there
	d := ds.Column("foo")
	c.Assert(d[0], Equals, "a")
	c.Assert(d[1], Equals, "b")
	c.Assert(d[2], Equals, "c")
}
|
||||
|
||||
// TestInsert checks index/dimension validation and storage of InsertValues.
func (s *TablibSuite) TestInsert(c *C) {
	ds := presidentDataset()
	// invalid index
	c.Assert(ds.InsertValues(-1, "foo", "bar"), Equals, tablib.ErrInvalidRowIndex)
	c.Assert(ds.InsertValues(100, "foo", "bar"), Equals, tablib.ErrInvalidRowIndex)
	// too many columns
	c.Assert(ds.InsertValues(1, "foo", "bar", 42, "invalid"), Equals, tablib.ErrInvalidDimensions)
	// not enough columns
	c.Assert(ds.InsertValues(1, "foo", "bar"), Equals, tablib.ErrInvalidDimensions)
	// ok
	c.Assert(ds.InsertValues(1, "foo", "bar", 42), Equals, nil)
	// test values are there
	d := validRowAt(ds, 1)
	c.Assert(d["firstName"], Equals, "foo")
	c.Assert(d["lastName"], Equals, "bar")
	c.Assert(d["gpa"], Equals, 42)
}
|
||||
|
||||
// TestInsertColumn checks index/dimension validation and storage of
// InsertColumn.
func (s *TablibSuite) TestInsertColumn(c *C) {
	ds := presidentDataset()
	// invalid index
	c.Assert(ds.InsertColumn(-1, "wut", []interface{}{"foo", "bar"}), Equals, tablib.ErrInvalidColumnIndex)
	c.Assert(ds.InsertColumn(100, "wut", []interface{}{"foo", "bar"}), Equals, tablib.ErrInvalidColumnIndex)
	// too many rows
	c.Assert(ds.InsertColumn(1, "wut", []interface{}{"foo", "bar", "baz", "kidding"}), Equals, tablib.ErrInvalidDimensions)
	// not enough rows
	c.Assert(ds.InsertColumn(1, "wut", []interface{}{"foo", "bar"}), Equals, tablib.ErrInvalidDimensions)
	// ok
	c.Assert(ds.InsertColumn(1, "wut", []interface{}{"foo", "bar", "baz"}), Equals, nil)
	// test values are there
	d := ds.Column("wut")
	c.Assert(d[0], Equals, "foo")
	c.Assert(d[1], Equals, "bar")
	c.Assert(d[2], Equals, "baz")
}
|
||||
|
||||
// firstNameB64 is a dynamic-column function: it base64-encodes the first
// cell (the first name) of a row.
func firstNameB64(row []interface{}) interface{} {
	name := row[0].(string)
	return base64.StdEncoding.EncodeToString([]byte(name))
}
|
||||
|
||||
// lastNameB64 is a dynamic-column function: it base64-encodes the second
// cell (the last name) of a row.
func lastNameB64(row []interface{}) interface{} {
	name := row[1].(string)
	return base64.StdEncoding.EncodeToString([]byte(name))
}
|
||||
|
||||
// TestDynamicColumn checks appending and inserting computed columns.
func (s *TablibSuite) TestDynamicColumn(c *C) {
	ds := presidentDataset()
	ds.AppendDynamicColumn("firstB64", firstNameB64)
	d := ds.Column("firstB64")
	c.Assert(d[0], Equals, "Sm9obg==") // John
	c.Assert(d[1], Equals, "R2Vvcmdl") // George
	c.Assert(d[2], Equals, "VGhvbWFz") // Thomas

	// invalid index
	c.Assert(ds.InsertDynamicColumn(-1, "foo", lastNameB64), Equals, tablib.ErrInvalidColumnIndex)
	c.Assert(ds.InsertDynamicColumn(100, "foo", lastNameB64), Equals, tablib.ErrInvalidColumnIndex)
	// ok
	c.Assert(ds.InsertDynamicColumn(2, "lastB64", lastNameB64), Equals, nil)
	// check values
	d = ds.Column("lastB64")
	c.Assert(d[0], Equals, "QWRhbXM=") // Adams
	c.Assert(d[1], Equals, "V2FzaGluZ3Rvbg==") // Washington
	c.Assert(d[2], Equals, "SmVmZmVyc29u") // Jefferson
}
|
||||
|
||||
// TestRow checks index validation and lookup of a single row.
func (s *TablibSuite) TestRow(c *C) {
	ds := presidentDataset()
	row, err := ds.Row(-1)
	c.Assert(err, Equals, tablib.ErrInvalidRowIndex)
	row, err = ds.Row(100)
	c.Assert(err, Equals, tablib.ErrInvalidRowIndex)
	row, err = ds.Row(1)
	c.Assert(err, Equals, nil)
	c.Assert(row["firstName"], Equals, "George")
	c.Assert(row["lastName"], Equals, "Washington")
}
|
||||
|
||||
// TestRows checks index validation and lookup of several rows at once.
func (s *TablibSuite) TestRows(c *C) {
	ds := presidentDataset()
	rows, err := ds.Rows(-1, 5)
	c.Assert(err, Equals, tablib.ErrInvalidRowIndex)
	rows, err = ds.Rows(0, 1, 100)
	c.Assert(err, Equals, tablib.ErrInvalidRowIndex)
	rows, err = ds.Rows(1, 2)
	c.Assert(err, Equals, nil)
	c.Assert(rows[0]["firstName"], Equals, "George")
	c.Assert(rows[0]["lastName"], Equals, "Washington")
	c.Assert(rows[1]["firstName"], Equals, "Thomas")
	c.Assert(rows[1]["lastName"], Equals, "Jefferson")
}
|
||||
|
||||
// TestSlice checks bound validation and the half-open [lower, upper)
// semantics of Slice.
func (s *TablibSuite) TestSlice(c *C) {
	ds := presidentDataset()
	_, err := ds.Slice(-1, 5) // invalid lower bound
	c.Assert(err, Equals, tablib.ErrInvalidRowIndex)
	_, err = ds.Slice(0, 100) // invalid upper bound
	c.Assert(err, Equals, tablib.ErrInvalidRowIndex)
	_, err = ds.Slice(1, 0) // lower bound > upper bound
	c.Assert(err, Equals, tablib.ErrInvalidRowIndex)
	s1, err := ds.Slice(1, 2) // single row
	c.Assert(err, Equals, nil)
	c.Assert(s1.Height(), Equals, 1)
	row, _ := s1.Row(0)
	c.Assert(row["firstName"], Equals, "George")
	c.Assert(row["lastName"], Equals, "Washington")
	s2, err := ds.Slice(1, 3) // two rows
	c.Assert(err, Equals, nil)
	c.Assert(s2.Height(), Equals, 2)
	row, _ = s2.Row(0)
	c.Assert(row["firstName"], Equals, "George")
	c.Assert(row["lastName"], Equals, "Washington")
	row, _ = s2.Row(1)
	c.Assert(row["firstName"], Equals, "Thomas")
	c.Assert(row["lastName"], Equals, "Jefferson")
}
|
||||
|
||||
// TestTranspose checks that Transpose turns the first column into headers
// and the remaining columns into rows.
func (s *TablibSuite) TestTranspose(c *C) {
	tr := carDataset().Transpose()
	c.Assert(tr.Headers()[0], Equals, "Maker")
	c.Assert(tr.Headers()[1], Equals, "Porsche")
	c.Assert(tr.Headers()[2], Equals, "Skoda")
	c.Assert(len(tr.Headers()), Equals, 6)
	r := validRowAt(tr, 0) // Model
	c.Assert(r["Porsche"], Equals, "991")
	c.Assert(r["Bentley"], Equals, "Continental GT")
	r = validRowAt(tr, 1) // Year
	c.Assert(r["Porsche"], Equals, 2012)
	c.Assert(r["Skoda"], Equals, 2011)
	c.Assert(r["Bentley"], Equals, 2003)
}
|
||||
|
||||
// TestStack checks vertical concatenation of two same-shaped datasets and
// the dimension error on mismatched shapes.
func (s *TablibSuite) TestStack(c *C) {
	ds, _ := presidentDataset().Stack(frenchPresidentDataset())
	d := ds.Column("lastName")
	c.Assert(d[0], Equals, "Adams")
	c.Assert(d[1], Equals, "Washington")
	c.Assert(d[2], Equals, "Jefferson")
	c.Assert(d[3], Equals, "Chirac")
	c.Assert(d[4], Equals, "Sarkozy")
	c.Assert(d[5], Equals, "Hollande")

	// check invalid dimensions
	x := frenchPresidentDataset()
	x.DeleteColumn("lastName")
	ds, err := presidentDataset().Stack(x)
	c.Assert(err, Equals, tablib.ErrInvalidDimensions)
}
|
||||
|
||||
// TestStackColumn checks horizontal concatenation of column sets and the
// dimension error on mismatched row counts.
func (s *TablibSuite) TestStackColumn(c *C) {
	ds, _ := frenchPresidentDataset().StackColumn(frenchPresidentAdditionalDataset())
	d := lastRow(ds)
	c.Assert(d["firstName"], Equals, "François")
	c.Assert(d["lastName"], Equals, "Hollande")
	c.Assert(d["from"], Equals, "Rouen")
	c.Assert(d["duration"], Equals, 5)

	// check invalid dimensions
	x := frenchPresidentAdditionalDataset()
	x.DeleteRow(x.Height() - 1)
	ds, err := frenchPresidentDataset().StackColumn(x)
	c.Assert(err, Equals, tablib.ErrInvalidDimensions)
}
|
||||
|
||||
// TestFiltering checks tag-based filtering, including an unknown tag and
// tag preservation in the filtered dataset.
func (s *TablibSuite) TestFiltering(c *C) {
	ds := presidentDatasetWithTags()
	df := ds.Filter("Massachusetts")
	c.Assert(df.Height(), Equals, 1)
	r := lastRow(df)
	c.Assert(r["firstName"], Equals, "John")
	c.Assert(r["lastName"], Equals, "Adams")

	df = ds.Filter("Virginia")
	c.Assert(df.Height(), Equals, 2)
	r = validRowAt(df, 0)
	c.Assert(r["firstName"], Equals, "George")
	c.Assert(r["lastName"], Equals, "Washington")
	r = validRowAt(df, 1)
	c.Assert(r["firstName"], Equals, "Thomas")
	c.Assert(r["lastName"], Equals, "Jefferson")

	// An unknown tag yields an empty dataset with the same columns.
	df = ds.Filter("Woot")
	c.Assert(df.Height(), Equals, 0)
	c.Assert(df.Width(), Equals, 3)

	df = ds.Filter("Virginia")
	tags, _ := df.Tags(1) // Jefferson
	c.Assert(len(tags), Equals, 1)
	c.Assert(tags[0], Equals, "Virginia")
}
|
||||
|
||||
// TestSort checks ascending Sort on a numeric column and SortReverse on a
// string column.
func (s *TablibSuite) TestSort(c *C) {
	ds := presidentDataset().Sort("gpa")
	c.Assert(ds.Height(), Equals, 3)

	r := validRowAt(ds, 0)
	c.Assert(r["firstName"], Equals, "Thomas")
	c.Assert(r["lastName"], Equals, "Jefferson")
	c.Assert(r["gpa"], Equals, 50)

	r = validRowAt(ds, 1)
	c.Assert(r["firstName"], Equals, "George")
	c.Assert(r["lastName"], Equals, "Washington")
	c.Assert(r["gpa"], Equals, 67)

	r = validRowAt(ds, 2)
	c.Assert(r["firstName"], Equals, "John")
	c.Assert(r["lastName"], Equals, "Adams")
	c.Assert(r["gpa"], Equals, 90)

	ds = ds.SortReverse("lastName")
	c.Assert(ds.Height(), Equals, 3)

	r = validRowAt(ds, 0)
	c.Assert(r["firstName"], Equals, "George")
	c.Assert(r["lastName"], Equals, "Washington")

	r = validRowAt(ds, 1)
	c.Assert(r["firstName"], Equals, "Thomas")
	c.Assert(r["lastName"], Equals, "Jefferson")

	r = validRowAt(ds, 2)
	c.Assert(r["firstName"], Equals, "John")
	c.Assert(r["lastName"], Equals, "Adams")
}
|
||||
|
||||
// mustBeYoung is a column constraint: the cell (an int) must be at most 50.
func mustBeYoung(val interface{}) bool {
	age := val.(int)
	return age <= 50
}
|
||||
|
||||
// mustBeOld is a column constraint: the cell (an int) must be at least 50.
func mustBeOld(val interface{}) bool {
	age := val.(int)
	return age >= 50
}
|
||||
|
||||
// TestValidFailFast checks constraint registration and the fail-fast
// validation path (note: ConstrainColumn replaces a previous constraint).
func (s *TablibSuite) TestValidFailFast(c *C) {
	ds := presidentDataset()

	c.Assert(ds.HasAnyConstraint(), Equals, false)

	ds.ConstrainColumn("gpa", mustBeYoung)
	c.Assert(ds.HasAnyConstraint(), Equals, true)
	c.Assert(ds.ValidFailFast(), Equals, false)

	ds.ConstrainColumn("gpa", mustBeOld)
	c.Assert(ds.ValidFailFast(), Equals, true)
}
|
||||
|
||||
// TestValid checks full validation and the recorded ValidationErrors
// (row/column coordinates of each failing cell).
func (s *TablibSuite) TestValid(c *C) {
	ds := presidentDataset()

	c.Assert(ds.HasAnyConstraint(), Equals, false)

	ds.ConstrainColumn("gpa", mustBeOld)

	c.Assert(ds.HasAnyConstraint(), Equals, true)
	c.Assert(ds.Valid(), Equals, true)
	c.Assert(len(ds.ValidationErrors), Equals, 0)

	ds.ConstrainColumn("gpa", mustBeYoung)
	c.Assert(ds.Valid(), Equals, false)
	c.Assert(len(ds.ValidationErrors), Equals, 2)

	c.Assert(ds.ValidationErrors[0].Row, Equals, 0)
	c.Assert(ds.ValidationErrors[0].Column, Equals, 2)

	c.Assert(ds.ValidationErrors[1].Row, Equals, 1)
	c.Assert(ds.ValidationErrors[1].Column, Equals, 2)
}
|
||||
|
||||
// TestValidSubset checks extraction of the rows that satisfy every
// constraint; the subset keeps tags but drops the constraints.
func (s *TablibSuite) TestValidSubset(c *C) {
	ds := presidentDatasetWithTags()

	c.Assert(ds.Valid(), Equals, true)
	c.Assert(ds.ValidSubset(), Equals, ds)

	ds.ConstrainColumn("gpa", mustBeYoung)
	c.Assert(ds.Valid(), Equals, false)
	c.Assert(len(ds.ValidationErrors), Equals, 2)

	// Height is 1
	df := ds.ValidSubset()
	c.Assert(df.Height(), Equals, 1)
	c.Assert(df.HasAnyConstraint(), Equals, false)
	r := validRowAt(df, 0)
	c.Assert(r["firstName"], Equals, "Thomas")
	tags, _ := df.Tags(0)
	c.Assert(tags[0], Equals, "Virginia")
}
|
||||
|
||||
// TestInvalidSubset checks extraction of the rows that violate at least
// one constraint; the subset keeps tags but drops the constraints.
func (s *TablibSuite) TestInvalidSubset(c *C) {
	ds := presidentDatasetWithTags()

	c.Assert(ds.Valid(), Equals, true)
	c.Assert(ds.InvalidSubset(), Equals, ds)

	ds.ConstrainColumn("gpa", mustBeYoung)
	c.Assert(ds.Valid(), Equals, false)
	c.Assert(len(ds.ValidationErrors), Equals, 2)

	// Height is 2
	df := ds.InvalidSubset()
	c.Assert(df.Height(), Equals, 2)
	c.Assert(df.HasAnyConstraint(), Equals, false)
	r := validRowAt(df, 0)
	c.Assert(r["firstName"], Equals, "John")
	tags, _ := df.Tags(0)
	c.Assert(tags[0], Equals, "Massachusetts")
	r = validRowAt(df, 1)
	c.Assert(r["firstName"], Equals, "George")
}
|
||||
|
||||
// TestJSON checks the JSON export (keys are emitted in sorted order).
func (s *TablibSuite) TestJSON(c *C) {
	ds := frenchPresidentDataset()
	j, _ := ds.JSON()
	c.Assert(j.String(), Equals, "[{\"firstName\":\"Jacques\",\"gpa\":88,\"lastName\":\"Chirac\"},{\"firstName\":\"Nicolas\",\"gpa\":98,\"lastName\":\"Sarkozy\"},{\"firstName\":\"François\",\"gpa\":34,\"lastName\":\"Hollande\"}]")
}
|
||||
|
||||
// TestYAML checks the YAML export of the French president fixture.
func (s *TablibSuite) TestYAML(c *C) {
	ds := frenchPresidentDataset()
	j, _ := ds.YAML()
	c.Assert(j.String(), Equals, `- firstName: Jacques
  gpa: 88
  lastName: Chirac
- firstName: Nicolas
  gpa: 98
  lastName: Sarkozy
- firstName: François
  gpa: 34
  lastName: Hollande
`)
}
|
||||
|
||||
// TestCSV checks the CSV export of the French president fixture.
func (s *TablibSuite) TestCSV(c *C) {
	ds := frenchPresidentDataset()
	j, _ := ds.CSV()
	c.Assert(j.String(), Equals, `firstName,lastName,gpa
Jacques,Chirac,88
Nicolas,Sarkozy,98
François,Hollande,34
`)
}
|
||||
|
||||
// TestTSV checks the tab-separated export of the French president fixture.
func (s *TablibSuite) TestTSV(c *C) {
	ds := frenchPresidentDataset()
	j, _ := ds.TSV()
	c.Assert(j.String(), Equals, `firstName`+"\t"+`lastName`+"\t"+`gpa
Jacques`+"\t"+`Chirac`+"\t"+`88
Nicolas`+"\t"+`Sarkozy`+"\t"+`98
François`+"\t"+`Hollande`+"\t"+`34
`)
}
|
||||
|
||||
// TestHTML checks the HTML table export of the French president fixture.
// NOTE(review): the expected literal's internal indentation was lost in
// transit — verify against the actual HTML() output before relying on it.
func (s *TablibSuite) TestHTML(c *C) {
	ds := frenchPresidentDataset()
	j := ds.HTML()
	c.Assert(j.String(), Equals, `<table class="table table-striped">
<thead>
<tr>
<th>firstName</th>
<th>lastName</th>
<th>gpa</th>
</tr>
</thead>
<tbody>
<tr>
<td>Jacques</td>
<td>Chirac</td>
<td>88</td>
</tr>
<tr>
<td>Nicolas</td>
<td>Sarkozy</td>
<td>98</td>
</tr>
<tr>
<td>François</td>
<td>Hollande</td>
<td>34</td>
</tr>
</tbody>
</table>`)
}
|
||||
|
||||
// TestTabular checks the grid, simple, condensed and markdown renderings.
// NOTE(review): the expected literals' internal space padding was lost in
// transit — verify against the actual Tabular() output before relying on it.
func (s *TablibSuite) TestTabular(c *C) {
	ds := frenchPresidentDataset()
	j := ds.Tabular(tablib.TabularGrid)
	c.Assert(j.String(), Equals, `+--------------+-------------+--------+
| firstName | lastName | gpa |
+==============+=============+========+
| Jacques | Chirac | 88 |
+--------------+-------------+--------+
| Nicolas | Sarkozy | 98 |
+--------------+-------------+--------+
| François | Hollande | 34 |
+--------------+-------------+--------+
`)

	j = ds.Tabular(tablib.TabularSimple)
	c.Assert(j.String(), Equals, `-------------- ------------- --------`+"\n"+
		` firstName lastName gpa `+"\n"+
		`-------------- ------------- --------`+"\n"+
		` Jacques Chirac 88 `+"\n"+
		"\n"+
		` Nicolas Sarkozy 98 `+"\n"+
		"\n"+
		` François Hollande 34 `+"\n"+
		`-------------- ------------- --------`+
		"\n")

	j = ds.Tabular(tablib.TabularCondensed)
	c.Assert(j.String(), Equals, `-------------- ------------- --------`+"\n"+
		` firstName lastName gpa `+"\n"+
		`-------------- ------------- --------`+"\n"+
		` Jacques Chirac 88 `+"\n"+
		` Nicolas Sarkozy 98 `+"\n"+
		` François Hollande 34 `+"\n"+
		`-------------- ------------- --------`+
		"\n")

	j = presidentDataset().Tabular(tablib.TabularMarkdown)
	c.Assert(j.String(), Equals, `| firstName | lastName | gpa |`+" \n"+
		`| -------------- | --------------- | ------- |`+" \n"+
		`| John | Adams | 90 |`+" \n"+
		`| George | Washington | 67 |`+" \n"+
		`| Thomas | Jefferson | 50 |`+" \n")
}
|
||||
|
||||
// TestMySQL checks the generated MySQL DDL and INSERT statements.
func (s *TablibSuite) TestMySQL(c *C) {
	ds := frenchPresidentDataset()
	j := ds.MySQL("presidents")
	c.Assert(j.String(), Equals, `CREATE TABLE IF NOT EXISTS presidents
(
	id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
	firstName VARCHAR(9),
	lastName VARCHAR(8),
	gpa DOUBLE
);

INSERT INTO presidents VALUES(1, 'Jacques', 'Chirac', 88);
INSERT INTO presidents VALUES(2, 'Nicolas', 'Sarkozy', 98);
INSERT INTO presidents VALUES(3, 'François', 'Hollande', 34);

COMMIT;
`)
}
|
||||
|
||||
// TestPostgres checks the generated PostgreSQL DDL and INSERT statements.
func (s *TablibSuite) TestPostgres(c *C) {
	ds := frenchPresidentDataset()
	j := ds.Postgres("presidents")
	c.Assert(j.String(), Equals, `CREATE TABLE IF NOT EXISTS presidents
(
	id SERIAL PRIMARY KEY,
	firstName TEXT,
	lastName TEXT,
	gpa NUMERIC
);

INSERT INTO presidents VALUES(1, 'Jacques', 'Chirac', 88);
INSERT INTO presidents VALUES(2, 'Nicolas', 'Sarkozy', 98);
INSERT INTO presidents VALUES(3, 'François', 'Hollande', 34);

COMMIT;
`)
}
|
||||
|
||||
// TestLoadDatabookJSON checks loading a two-sheet Databook from JSON.
func (s *TablibSuite) TestLoadDatabookJSON(c *C) {
	var b bytes.Buffer
	b.WriteString(`[
  {
    "title": "Cars",
    "data": [
      {"Maker":"Bentley","Model":"Continental GT","Year":2003},
      {"Maker":"Ferrari","Model":"458","Year":2009},
      {"Maker":"Skoda","Model":"Octavia","Year":2011},
      {"Maker":"Porsche","Model":"991","Year":2012},
      {"Maker":"Citroen","Model":"Picasso II","Year":2013}
    ]
  },
  {
    "title": "Presidents",
    "data": [
      {"Age":90,"First name":"John","Last name":"Adams"},
      {"Age":83,"First name":"Henry","Last name":"Ford"},
      {"Age":67,"First name":"George","Last name":"Washington"}
    ]
  }
]`)
	db, _ := tablib.LoadDatabookJSON(b.Bytes())
	c.Assert(db.Size(), Equals, 2)
	c.Assert(db.Sheet("Cars").Title(), Equals, "Cars")
	c.Assert(db.Sheet("Cars").Dataset().Height(), Equals, 5)
	c.Assert(db.Sheet("Presidents").Title(), Equals, "Presidents")
	c.Assert(db.Sheet("Presidents").Dataset().Height(), Equals, 3)
}
|
||||
|
||||
// TestLoadDatabookYAML checks loading a two-sheet Databook from YAML,
// then Sheets() and Wipe(). The YAML nesting below was reconstructed to
// standard two-space indentation (the original indentation was lost in
// transit) — verify before relying on it.
func (s *TablibSuite) TestLoadDatabookYAML(c *C) {
	var b bytes.Buffer
	b.WriteString(`- data:
  - Maker: Bentley
    Model: Continental GT
    Year: 2003
  - Maker: Ferrari
    Model: "458"
    Year: 2009
  - Maker: Skoda
    Model: Octavia
    Year: 2011
  - Maker: Porsche
    Model: "991"
    Year: 2012
  - Maker: Citroen
    Model: Picasso II
    Year: 2013
  title: Cars
- data:
  - Age: 90
    First name: John
    Last name: Adams
    Maker: Bentley
    Model: Continental GT
    Year: 2003
  - Age: 83
    First name: Henry
    Last name: Ford
    Maker: Ferrari
    Model: "458"
    Year: 2009
  - Age: 67
    First name: George
    Last name: Washington
    Maker: Skoda
    Model: Octavia
    Year: 2011
  title: Presidents
`)
	db, _ := tablib.LoadDatabookYAML(b.Bytes())
	c.Assert(db.Size(), Equals, 2)
	c.Assert(db.Sheet("Cars").Title(), Equals, "Cars")
	c.Assert(db.Sheet("Cars").Dataset().Height(), Equals, 5)
	c.Assert(db.Sheet("Presidents").Title(), Equals, "Presidents")
	c.Assert(db.Sheet("Presidents").Dataset().Height(), Equals, 3)

	c.Assert(len(db.Sheets()), Equals, 2)
	db.Wipe()
	c.Assert(db.Size(), Equals, 0)
}
|
||||
|
||||
// TestLoadCSV checks loading a Dataset from CSV text.
func (s *TablibSuite) TestLoadCSV(c *C) {
	var b bytes.Buffer
	b.WriteString(`Maker, Model, Year
Bentley, Continental GT, 2003
Ferrari, 458, 2009
Skoda, Octavia, 2011
Porsche, 991, 2012
Citroen, Picasso II, 2013`)
	ds, _ := tablib.LoadCSV(b.Bytes())
	c.Assert(ds.Height(), Equals, 5)
	c.Assert(ds.Width(), Equals, 3)
	r, _ := ds.Row(0)
	c.Assert(r["Maker"], Equals, "Bentley")
}
|
||||
|
||||
// TestLoadTSV checks loading a Dataset from tab-separated text.
func (s *TablibSuite) TestLoadTSV(c *C) {
	var b bytes.Buffer
	b.WriteString(`Maker` + "\t" + `Model` + "\t" + `Year
Bentley` + "\t" + `Continental GT` + "\t" + `2003
Ferrari` + "\t" + `458` + "\t" + `2009
Skoda` + "\t" + `Octavia` + "\t" + `2011
Porsche` + "\t" + `991` + "\t" + `2012
Citroen` + "\t" + `Picasso II` + "\t" + `2013`)
	ds, _ := tablib.LoadTSV(b.Bytes())
	c.Assert(ds.Height(), Equals, 5)
	c.Assert(ds.Width(), Equals, 3)
	r, _ := ds.Row(0)
	c.Assert(r["Maker"], Equals, "Bentley")
}
|
||||
|
||||
// TestLoadXML checks loading a Dataset from a <dataset>/<row> XML document.
func (s *TablibSuite) TestLoadXML(c *C) {
	var b bytes.Buffer
	b.WriteString(`<dataset>
 <row>
  <age>90</age>
  <firstName>John</firstName>
  <lastName>Adams</lastName>
 </row> <row>
  <age>67</age>
  <firstName>George</firstName>
  <lastName>Washington</lastName>
 </row> <row>
  <age>83</age>
  <firstName>Henry</firstName>
  <lastName>Ford</lastName>
 </row>
</dataset>`)
	ds, _ := tablib.LoadXML(b.Bytes())
	c.Assert(ds.Height(), Equals, 3)
	c.Assert(ds.Width(), Equals, 3)
	r, _ := ds.Row(0)
	c.Assert(r["firstName"], Equals, "John")
	r, _ = ds.Row(1)
	c.Assert(r["lastName"], Equals, "Washington")
}
|
||||
|
||||
// TestXML checks the default XML export of the president fixture.
// NOTE(review): the exact leading spaces inside these literals were lost
// in transit — verify against the actual XML() output.
func (s *TablibSuite) TestXML(c *C) {
	ds := presidentDataset()
	xml, err := ds.XML()
	c.Assert(err, Equals, nil)
	c.Assert(xml.String(), Equals, "<dataset>\n"+
		" <row>\n"+
		" <firstName>John</firstName>\n"+
		" <gpa>90</gpa>\n"+
		" <lastName>Adams</lastName>\n"+
		" </row> <row>\n"+
		" <firstName>George</firstName>\n"+
		" <gpa>67</gpa>\n"+
		" <lastName>Washington</lastName>\n"+
		" </row> <row>\n"+
		" <firstName>Thomas</firstName>\n"+
		" <gpa>50</gpa>\n"+
		" <lastName>Jefferson</lastName>\n"+
		" </row>\n"+
		" </dataset>")
}
|
||||
|
||||
// ---------- Benchmarking ----------
|
||||
|
||||
// BenchmarkAppendRow measures the cost of appending rows to a small dataset.
func (s *TablibSuite) BenchmarkAppendRow(c *C) {
	benchDataset1 := frenchPresidentDataset()
	for i := 0; i < c.N; i++ {
		benchDataset1.AppendValues("foo", "bar", 42)
	}
}
|
65
tablib/tablib_util.go
Normal file
65
tablib/tablib_util.go
Normal file
@ -0,0 +1,65 @@
|
||||
package tablib
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
"time"
|
||||
)
|
||||
|
||||
// internalLoadFromDict creates a Dataset from an array of map representing columns.
|
||||
func internalLoadFromDict(input []map[string]interface{}) (*Dataset, error) {
|
||||
// retrieve columns
|
||||
headers := make([]string, 0, 10)
|
||||
for h := range input[0] {
|
||||
headers = append(headers, h)
|
||||
}
|
||||
|
||||
ds := NewDataset(headers)
|
||||
for _, e := range input {
|
||||
row := make([]interface{}, 0, len(headers))
|
||||
for _, h := range headers {
|
||||
row = append(row, e[h])
|
||||
}
|
||||
ds.AppendValues(row...)
|
||||
}
|
||||
|
||||
return ds, nil
|
||||
}
|
||||
|
||||
// isTagged reports whether tag appears in tags.
func isTagged(tag string, tags []string) bool {
	found := false
	for i := 0; i < len(tags) && !found; i++ {
		found = tags[i] == tag
	}
	return found
}
|
||||
|
||||
// asString returns a value as a string.
|
||||
func (d *Dataset) asString(vv interface{}) string {
|
||||
var v string
|
||||
switch vv.(type) {
|
||||
case string:
|
||||
v = vv.(string)
|
||||
case int:
|
||||
v = strconv.Itoa(vv.(int))
|
||||
case int64:
|
||||
v = strconv.FormatInt(vv.(int64), 10)
|
||||
case uint64:
|
||||
v = strconv.FormatUint(vv.(uint64), 10)
|
||||
case bool:
|
||||
v = strconv.FormatBool(vv.(bool))
|
||||
case float64:
|
||||
v = strconv.FormatFloat(vv.(float64), 'G', -1, 32)
|
||||
case time.Time:
|
||||
v = vv.(time.Time).Format(time.RFC3339)
|
||||
default:
|
||||
if d.EmptyValue != "" {
|
||||
v = d.EmptyValue
|
||||
} else {
|
||||
v = fmt.Sprintf("%s", v)
|
||||
}
|
||||
}
|
||||
return v
|
||||
}
|
50
tablib/tablib_xlsx.go
Normal file
50
tablib/tablib_xlsx.go
Normal file
@ -0,0 +1,50 @@
|
||||
package tablib
|
||||
|
||||
import (
|
||||
"github.com/tealeg/xlsx"
|
||||
)
|
||||
|
||||
// XLSX exports the Dataset as a byte array representing the .xlsx format.
|
||||
func (d *Dataset) XLSX() (*Exportable, error) {
|
||||
file := xlsx.NewFile()
|
||||
if err := d.addXlsxSheetToFile(file, "Sheet 1"); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
b := newBuffer()
|
||||
file.Write(b)
|
||||
return newExportable(b), nil
|
||||
}
|
||||
|
||||
// XLSX returns a XLSX representation of the Databook as an exportable.
|
||||
func (d *Databook) XLSX() (*Exportable, error) {
|
||||
file := xlsx.NewFile()
|
||||
|
||||
for _, s := range d.sheets {
|
||||
s.dataset.addXlsxSheetToFile(file, s.title)
|
||||
}
|
||||
|
||||
b := newBuffer()
|
||||
file.Write(b)
|
||||
return newExportable(b), nil
|
||||
}
|
||||
|
||||
func (d *Dataset) addXlsxSheetToFile(file *xlsx.File, sheetName string) error {
|
||||
sheet, err := file.AddSheet(sheetName)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
back := d.Records()
|
||||
for i, r := range back {
|
||||
row := sheet.AddRow()
|
||||
for _, c := range r {
|
||||
cell := row.AddCell()
|
||||
cell.Value = c
|
||||
if i == 0 {
|
||||
cell.GetStyle().Font.Bold = true
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
64
tablib/tablib_xml.go
Normal file
64
tablib/tablib_xml.go
Normal file
@ -0,0 +1,64 @@
|
||||
package tablib
|
||||
|
||||
import (
	"bytes"
	"errors"

	"github.com/agrison/mxj"
)
|
||||
|
||||
// XML returns an XML representation of the Dataset as an Exportable,
// using "row" as the element name for each record.
func (d *Dataset) XML() (*Exportable, error) {
	return d.XMLWithTagNamePrefixIndent("row", " ", " ")
}
|
||||
|
||||
// XML returns an XML representation of the Databook as an Exportable.
// Each sheet becomes a <sheet> element containing its title and dataset.
func (d *Databook) XML() (*Exportable, error) {
	b := newBuffer()
	b.WriteString("<databook>\n")
	for _, s := range d.sheets {
		// NOTE(review): sheet titles are written verbatim, not XML-escaped —
		// confirm titles can never contain markup characters.
		b.WriteString(" <sheet>\n <title>" + s.title + "</title>\n ")
		row, err := s.dataset.XMLWithTagNamePrefixIndent("row", " ", " ")
		if err != nil {
			return nil, err
		}
		b.Write(row.Bytes())
		b.WriteString("\n </sheet>")
	}
	b.WriteString("\n</databook>")
	return newExportable(b), nil
}
|
||||
|
||||
// XMLWithTagNamePrefixIndent returns an XML representation of the Dataset
// using a custom row tag name, per-line prefix and indent string.
func (d *Dataset) XMLWithTagNamePrefixIndent(tagName, prefix, indent string) (*Exportable, error) {
	back := d.Dict()

	exportable := newExportable(newBuffer())
	exportable.buffer.WriteString("<dataset>\n")
	for _, r := range back {
		// Each record is a map of column name to value; mxj serializes it
		// under tagName.
		m := mxj.Map(r.(map[string]interface{}))
		if err := m.XmlIndentWriter(exportable.buffer, prefix, indent, tagName); err != nil {
			return nil, err
		}
	}
	exportable.buffer.WriteString("\n" + prefix + "</dataset>")

	return exportable, nil
}
|
||||
|
||||
// LoadXML loads a Dataset from an XML source.
|
||||
func LoadXML(input []byte) (*Dataset, error) {
|
||||
m, _, err := mxj.NewMapXmlReaderRaw(bytes.NewReader(input))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// this seems quite a bit hacky
|
||||
datasetNode, _ := m.ValueForPath("dataset")
|
||||
rowNode := datasetNode.(map[string]interface{})["row"].([]interface{})
|
||||
|
||||
back := make([]map[string]interface{}, 0, len(rowNode))
|
||||
for _, r := range rowNode {
|
||||
back = append(back, r.(map[string]interface{}))
|
||||
}
|
||||
|
||||
return internalLoadFromDict(back)
|
||||
}
|
69
tablib/tablib_yaml.go
Normal file
69
tablib/tablib_yaml.go
Normal file
@ -0,0 +1,69 @@
|
||||
package tablib
|
||||
|
||||
import "gopkg.in/yaml.v2"
|
||||
|
||||
// LoadYAML loads a dataset from a YAML source.
|
||||
func LoadYAML(yamlContent []byte) (*Dataset, error) {
|
||||
var input []map[string]interface{}
|
||||
if err := yaml.Unmarshal(yamlContent, &input); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return internalLoadFromDict(input)
|
||||
}
|
||||
|
||||
// LoadDatabookYAML loads a Databook from a YAML source.
|
||||
func LoadDatabookYAML(yamlContent []byte) (*Databook, error) {
|
||||
var input []map[string]interface{}
|
||||
var internalInput []map[string]interface{}
|
||||
if err := yaml.Unmarshal(yamlContent, &input); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
db := NewDatabook()
|
||||
for _, d := range input {
|
||||
b, err := yaml.Marshal(d["data"])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := yaml.Unmarshal(b, &internalInput); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if ds, err := internalLoadFromDict(internalInput); err == nil {
|
||||
db.AddSheet(d["title"].(string), ds)
|
||||
} else {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return db, nil
|
||||
}
|
||||
|
||||
// YAML returns a YAML representation of the Dataset as an Exportable.
|
||||
func (d *Dataset) YAML() (*Exportable, error) {
|
||||
back := d.Dict()
|
||||
|
||||
b, err := yaml.Marshal(back)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return newExportableFromBytes(b), nil
|
||||
}
|
||||
|
||||
// YAML returns a YAML representation of the Databook as an Exportable.
|
||||
func (d *Databook) YAML() (*Exportable, error) {
|
||||
y := make([]map[string]interface{}, len(d.sheets))
|
||||
i := 0
|
||||
for _, s := range d.sheets {
|
||||
y[i] = make(map[string]interface{})
|
||||
y[i]["title"] = s.title
|
||||
y[i]["data"] = s.dataset.Dict()
|
||||
i++
|
||||
}
|
||||
b, err := yaml.Marshal(y)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return newExportableFromBytes(b), nil
|
||||
}
|
Loading…
x
Reference in New Issue
Block a user