sqoop import --connect 'jdbc:sybase:Tds:10.100.*.***:5500/DATABASE=****' --driver 'com.sybase.jdbc3.jdbc.SybDriver' --username "keswara" --password "****" --target-dir "/user/keswara/WT_CONSUMERSTATS" --verbose --query "select c.YEARMONTH as d_stat_yearmonth,b.CONSNO,a.CONSCURRJDGMNTNUM as d_conscurrjdgmntnum,a.CONSCURRDFLTNUM as d_conscurrdfltnum,a.CONSCURRNOTICENUM as d_conscurrnoticenum,a.CONSCURRNOTRLBONDNUM as d_conscurrnotrlbondnum,a.CONSCURRDFLTADMINNUM as d_conscurrdfltadminnum,a.AMNISTYIND as d_amnisty_ind,a.NCRCREDITACTIVE as d_ncr_credit_active_ind,b.ACTIVEIND as d_active_ind,a.ESTINCOME as d_income,'d_create_date' as d_create_date,e.SUBURBCODE as d_physaddrsuburb_code,
a.ENQHARDTOTALNUM as d_enqhardtotalnum,a.ENQSOFTTOTALNUM as d_enqsofttotalnum,a.ENQHARDNUMMONTH as d_enqhardnummonth,a.ENQSOFTNUMMONTH as d_enqsoftnummonth,a.ENQTOTALNUM as d_enqtotalnum,a.ENQTOTALNUMMONTH as d_enqtotalnummonth,'d_cug' as d_cug,a.CCACRWORSTEVER as d_crworstever,a.CCAINSWORSTEVER as d_insworstever,a.CCACRWORSTEVER as d_worstever,a.CCANUMACC as d_numacc,a.CCANUMOPENACC as d_numopenacc,a.CCANUMCLOSEDACC as d_numclosedacc,a.CCANUMNEGCLOSEDACC as d_numnegclosedacc,a.CCANUMPOSCLOSEDACC as d_numposclosedacc,a.CCANUMACTIVEACC as d_numactiveacc,a.CCANUMWRITEOFFS as d_numwriteoffs,a.CCANUMDECEASEDWRITEOFFS as d_numdeceasedwriteoffs,
a.CCANUMHANDEDOVER as d_numhandedover,a.CCANUMCRCARDREVOKE as d_numcrcardrevoke,a.CCANUMREPO as d_numrepo,a.CCATOTALINSTALMENTAMT as d_totalinstalmentamt,a.CCATOTALOPENBAL as d_totalopenbal,a.CCATOTALCURRBAL as d_totalcurrbal,a.CCATOTALOVDUECRAMT as d_totalovduecramt,a.CCATOTALOVDUEDRAMT as d_totalovduedramt,a.CCANUMPAIDUPDEFACC as d_numpaidupdefacc,a.CCANUMSUSPENDEDACC as d_numsuspendedacc,a.CCANUMFROZENACC as d_numfrozenacc,d.SCORE as d_con_no,d.EXCLUSIONCODE as d_exclusion_code,'d_score_date' as d_score_date from dw.FT_CONSUMERSTATS a
inner join dw.DM_CONSUMER as b on a.CONSKEY = b.CONSKEY,inner join dw.DM_MONTH as c on c.MONTHKEY = a.MONTHKEY,inner join dw.FT_CONSUMER_SCORE as d on d.CONSKEY = b.CONSKEY,inner join dw.DM_SUBURB as e on b.SUBURBKEY = e.SUBURBKEY where yearmonth = 201501 AND \$CONDITIONS" --split-by 1 --verbose --create-hive-table --hive-table wt_consumer_stats --hive-import --map-column-hive d_stat_yearmonth=integer,d_con_no=integer,d_conscurrjdgmntnum=integer,d_conscurrdfltnum=integer,d_conscurrnoticenum=integer,d_conscurrnotrlbondnum=integer,d_conscurrdfltadminnum=integer,d_amnisty_ind=string,d_ncr_credit_active_ind=string,d_active_ind=string,d_income=integer,d_create_date=datetime,d_physaddrsuburb_code=integer,d_enqhardtotalnum=integer,d_enqsofttotalnum=integer,
d_enqhardnummonth=integer,d_enqsoftnummonth=integer,d_enqtotalnum=integer,d_enqtotalnummonth=integer,d_cug=string,d_crworstever=string,d_insworstever=string,d_worstever=string,d_numacc=integer,d_numopenacc=integer,d_numclosedacc=integer,d_numnegclosedacc=integer,d_numposclosedacc=integer,d_numactiveacc=integer,d_numwriteoffs=integer,d_numdeceasedwriteoffs=integer,d_numhandedover=integer,d_numcrcardrevoke=integer,d_numrepo=integer,d_totalinstalmentamt=decimal,d_totalopenbal=decimal,d_totalcurrbal=decimal,d_totalovduecramt=decimal,d_totalovduedramt=decimal,d_numpaidupdefacc=integer,d_numsuspendedacc=integer,d_numfrozenacc=integer,d_score_version=string,d_score=integer,d_exclusion_code=string,d_score_date=integer
Did you try something like this?
sqoop import --connect 'jdbc:sybase:Tds:10.100.*.***:5500/DATABASE=****' --driver 'com.sybase.jdbc3.jdbc.SybDriver' --username "keswara" --password "****" --target-dir "/user/keswara/WT_CONSUMERSTATS" --verbose --query "select * from (
select c.YEARMONTH as d_stat_yearmonth,b.CONSNO,a.CONSCURRJDGMNTNUM as d_conscurrjdgmntnum,a.CONSCURRDFLTNUM as d_conscurrdfltnum,a.CONSCURRNOTICENUM as d_conscurrnoticenum,a.CONSCURRNOTRLBONDNUM as d_conscurrnotrlbondnum,a.CONSCURRDFLTADMINNUM as d_conscurrdfltadminnum,a.AMNISTYIND as d_amnisty_ind,a.NCRCREDITACTIVE as d_ncr_credit_active_ind,b.ACTIVEIND as d_active_ind,a.ESTINCOME as d_income,'d_create_date' as d_create_date,e.SUBURBCODE as d_physaddrsuburb_code,
a.ENQHARDTOTALNUM as d_enqhardtotalnum,a.ENQSOFTTOTALNUM as d_enqsofttotalnum,a.ENQHARDNUMMONTH as d_enqhardnummonth,a.ENQSOFTNUMMONTH as d_enqsoftnummonth,a.ENQTOTALNUM as d_enqtotalnum,a.ENQTOTALNUMMONTH as d_enqtotalnummonth,'d_cug' as d_cug,a.CCACRWORSTEVER as d_crworstever,a.CCAINSWORSTEVER as d_insworstever,a.CCACRWORSTEVER as d_worstever,a.CCANUMACC as d_numacc,a.CCANUMOPENACC as d_numopenacc,a.CCANUMCLOSEDACC as d_numclosedacc,a.CCANUMNEGCLOSEDACC as d_numnegclosedacc,a.CCANUMPOSCLOSEDACC as d_numposclosedacc,a.CCANUMACTIVEACC as d_numactiveacc,a.CCANUMWRITEOFFS as d_numwriteoffs,a.CCANUMDECEASEDWRITEOFFS as d_numdeceasedwriteoffs,
a.CCANUMHANDEDOVER as d_numhandedover,a.CCANUMCRCARDREVOKE as d_numcrcardrevoke,a.CCANUMREPO as d_numrepo,a.CCATOTALINSTALMENTAMT as d_totalinstalmentamt,a.CCATOTALOPENBAL as d_totalopenbal,a.CCATOTALCURRBAL as d_totalcurrbal,a.CCATOTALOVDUECRAMT as d_totalovduecramt,a.CCATOTALOVDUEDRAMT as d_totalovduedramt,a.CCANUMPAIDUPDEFACC as d_numpaidupdefacc,a.CCANUMSUSPENDEDACC as d_numsuspendedacc,a.CCANUMFROZENACC as d_numfrozenacc,d.SCORE as d_con_no,d.EXCLUSIONCODE as d_exclusion_code,'d_score_date' as d_score_date from dw.FT_CONSUMERSTATS a
inner join dw.DM_CONSUMER as b on a.CONSKEY = b.CONSKEY inner join dw.DM_MONTH as c on c.MONTHKEY = a.MONTHKEY inner join dw.FT_CONSUMER_SCORE as d on d.CONSKEY = b.CONSKEY inner join dw.DM_SUBURB as e on b.SUBURBKEY = e.SUBURBKEY where yearmonth = 201501
) t10 where \$CONDITIONS" --split-by 1 --verbose --create-hive-table --hive-table wt_consumer_stats --hive-import --map-column-hive d_stat_yearmonth=integer,d_con_no=integer,d_conscurrjdgmntnum=integer,d_conscurrdfltnum=integer,d_conscurrnoticenum=integer,d_conscurrnotrlbondnum=integer,d_conscurrdfltadminnum=integer,d_amnisty_ind=string,d_ncr_credit_active_ind=string,d_active_ind=string,d_income=integer,d_create_date=datetime,d_physaddrsuburb_code=integer,d_enqhardtotalnum=integer,d_enqsofttotalnum=integer,
d_enqhardnummonth=integer,d_enqsoftnummonth=integer,d_enqtotalnum=integer,d_enqtotalnummonth=integer,d_cug=string,d_crworstever=string,d_insworstever=string,d_worstever=string,d_numacc=integer,d_numopenacc=integer,d_numclosedacc=integer,d_numnegclosedacc=integer,d_numposclosedacc=integer,d_numactiveacc=integer,d_numwriteoffs=integer,d_numdeceasedwriteoffs=integer,d_numhandedover=integer,d_numcrcardrevoke=integer,d_numrepo=integer,d_totalinstalmentamt=decimal,d_totalopenbal=decimal,d_totalcurrbal=decimal,d_totalovduecramt=decimal,d_totalovduedramt=decimal,d_numpaidupdefacc=integer,d_numsuspendedacc=integer,d_numfrozenacc=integer,d_score_version=string,d_score=integer,d_exclusion_code=string,d_score_date=integer
Alternatively, keep the filter in the original --query itself: ".... where \$CONDITIONS AND yearmonth = 201501"
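For reference, the general free-form query pattern Sqoop expects looks like this (a simplified sketch; the table, column, and directory names are placeholders):
sqoop import --connect <jdbc-url> --driver <driver-class> --username <user> --password <pass> --target-dir /some/target/dir --query 'select ... from some_table where some_filter AND $CONDITIONS' --split-by some_column
Sqoop replaces $CONDITIONS with each mapper's split predicate, so it has to survive into the WHERE clause of the statement that is finally executed.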
The Python Part
I have a Python application with multiple entry points, json_out and json_in. I can run them both with this default.nix:
with import <nixpkgs> {};
(
let jsonio = python37.pkgs.buildPythonPackage rec {
pname = "jsonio";
version = "0.0.1";
src = ./.;
};
in python37.withPackages (ps: [ jsonio ])
).env
Like so:
$ nix-shell --run "json_out"
{ "a" : 1, "b", 2 }
$ nix-shell --run "echo { \"a\" : 1, \"b\", 2 } | json_in"
keys: a,b
values: 1,2
The System Part
I want to also invoke jq in the nix shell, like this:
$ nix-shell --pure --run "json_out | jq '.a' | json_in"
But I can't, because jq is not included in the environment. I know that I can include jq in the nix shell using this default.nix:
with import <nixpkgs> {};
stdenv.mkDerivation rec {
name = "jsonio-environment";
buildInputs = [ pkgs.jq ];
}
And it works on its own:
$ nix-shell --pure --run "echo { \"a\" : 1, \"b\", 2 } | jq '.a'"
{ "a" : 1 }
But now I don't have my application:
$ nix-shell --run "json_out | jq '.a'"
/tmp/nix-shell-20108-0/rc: line 1: json_out: command not found
The Question
What default.nix file can I provide that will include both my application and the jq package?
My preferred way to achieve this is to use .overrideAttrs to add additional dependencies to the environment like so:
with import <nixpkgs> {};
(
let jsonio = python37.pkgs.buildPythonPackage rec {
pname = "jsonio";
version = "0.0.1";
src = ./.;
};
in python37.withPackages (ps: [jsonio ])
).env.overrideAttrs (drv: {
buildInputs = [ jq ];
})
I needed to:
provide the output of buildPythonPackage as part of the buildInputs of mkDerivation
omit the .env, based on a hint from an error message:
Python 'env' attributes are intended for interactive nix-shell
sessions, not for building!
Here's what I ended up with:
with import <nixpkgs> {};
let jsonio_installed = (
let jsonio_module = (
python37.pkgs.buildPythonPackage rec {
pname = "jsonio";
version = "0.0.1";
src = ./.;
}
);
in python37.withPackages (ps: [jsonio_module ])
);
in stdenv.mkDerivation rec {
name = "jsonio-environment";
buildInputs = [ pkgs.jq jsonio_installed ];
}
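With this default.nix, both the json_out/json_in entry points and jq end up in the environment, so the pipeline from the question should work, e.g.:
$ nix-shell --pure --run "json_out | jq '.a' | json_in"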
I followed the guide for Spark Streaming + Flume integration, but I can't get any events in the end.
(https://spark.apache.org/docs/latest/streaming-flume-integration.html)
Can anyone help me analyze it?
In Flume, I created the file "avro_flume.conf" as follows:
# Describe/configure the source
a1.sources = r1
a1.channels = c1
a1.sources.r1.type = avro
a1.sources.r1.channels = c1
a1.sources.r1.bind = 123.57.54.113
a1.sources.r1.port = 4141
# Describe the sink
a1.sinks = k1
a1.sinks.k1.type = avro
# Use a channel which buffers events in memory
a1.channels.c1.type = memory
a1.channels.c1.capacity = 1000
a1.channels.c1.transactionCapacity = 100
# Bind the source and sink to the channel
a1.sources.r1.channels = c1
a1.sinks.k1.channel = c1
a1.sinks.k1.hostname = 123.57.54.113
a1.sinks.k1.port = 6666
a1.sources = r1
a1.sinks = spark
a1.channels = c1
In the file, 123.57.54.113 is the IP of the local host.
I start the programs as follows:
1. Start the agent:
flume-ng agent -c . -f conf/avro_spark.conf -n a1
2. Start the Spark Streaming example:
bin/run-example org.apache.spark.examples.streaming.FlumeEventCount 123.57.54.113 6666
3. Then I start the avro client:
flume-ng avro-client -c . -H 123.57.54.113 -p 4141 -F test/log.01
4.test/log.01" is a file created by echo which contains some string
In the end ,there is no events at all.
What's the problem?
Thanks!
I see "a1.sinks = spark" under heading "Binding the source and sink to the channel". But the sink with name "spark" is not defined elsewhere in your configuration.
Are you trying approach 1 or approach 2 from "https://spark.apache.org/docs/latest/streaming-flume-integration.html"?
Try removing the line "a1.sinks = spark" if you are trying approach 1.
For approach 2 use the following template:
agent.sinks = spark
agent.sinks.spark.type = org.apache.spark.streaming.flume.sink.SparkSink
agent.sinks.spark.hostname = <hostname of the local machine>
agent.sinks.spark.port = <port to listen on for connection from Spark>
agent.sinks.spark.channel = memoryChannel
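If you switch to approach 2, note that (per the linked guide) the Spark sink jar also has to be on the Flume agent's classpath, and the matching Spark example is the polling variant rather than FlumeEventCount; roughly:
bin/run-example org.apache.spark.examples.streaming.FlumePollingEventCount [host] [port]
(host and port here are whatever you configured for agent.sinks.spark above.)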
Consider this CSV file:
Node Name,Client Name,Job Directory,Policy Name
server1,test.domain.com,"vmware:/?filter= VMHostName AnyOf "server2.domain.com", "server3.domain.com"",TEST
My code:
$events = Import-Csv "C:\file.csv" | foreach {
New-Object PSObject -prop @{
Server = $_.{Node Name};
Client = $_.{Client Name};
{JobDirectory/Script} = $_.{Job Directory};
Policy = $_.{Policy Name};
}
}
I have some problems when I try to parse the third field. I am not sure if it's because of the comma or the double quotes.
This is the object I would like to have:
Node Name : server1
Client Name : test.domain.com
JobDirectory/Script : vmware:/?filter= VMHostName AnyOf "server2.domain.com", "server3.domain.com"
Policy Name : TEST
Can someone help me?
OK, so the easiest way to approach this is to read the file in with Get-Content and then split each line on the commas that are not inside quotes. I borrowed the regex from this solution.
Using your current input data, I would do something like this:
$filedata = Get-Content C:\temp\test.csv
$asObject = ForEach($singlerow in ($filedata | Select-Object -Skip 1)){
$props = @{}
$singlerow = $singlerow -split ',(?=(?:[^"]*"[^"]*")*[^"]*$)'
[pscustomobject][ordered]@{
Server = $singlerow[0]
Client = $singlerow[1]
"JobDirectory/Script" = $singlerow[2]
Policy = $singlerow[3]
}
}
Sample Output from $asObject | Format-List
Server : server1
Client : test.domain.com
JobDirectory/Script : "vmware:/?filter= VMHostName AnyOf "server2.domain.com", "server3.domain.com""
Policy : TEST
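To see what that split regex does, here is a quick self-contained check against the sample row (the row is pasted as a single-quoted literal; it should print the four fields, with the quoted third field left intact):
$line = 'server1,test.domain.com,"vmware:/?filter= VMHostName AnyOf "server2.domain.com", "server3.domain.com"",TEST'
$line -split ',(?=(?:[^"]*"[^"]*")*[^"]*$)'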
Another way, using your starting code:
$obj = gc c:\temp\test.csv |
% { $_ -replace '"(\b[^"]*\b)"','$1' } |
convertfrom-csv | % { [pscustomobject][ordered] @{
Server = $_.{Node Name}
Client = $_.{Client Name}
{JobDirectory/Script} = $_.{Job Directory}
Policy = $_.{Policy Name} }
}
How can I get the user list from a local group? I only have PS 2.0, and it does not have the Get-ADGroup command.
I can get local groups:
$adsi = [ADSI]"WinNT://$env:COMPUTERNAME"
$groups = $adsi.Children | Where { $_.SchemaClassName -eq 'Group' }
$groups | ft Name
What I need is to list all the members for each group.
You can try the following
$obj = [ADSI]"WinNT://$env:COMPUTERNAME"
$admingroup = $obj.Children | Where { $_.SchemaClassName -eq 'group'} | where {$_.name -eq 'Administrators'}
$admingroup.Invoke('Members') | % {$_.GetType().InvokeMember('Name', 'GetProperty', $null, $_, $null)}
Here are the common properties:
String: Description, FullName, HomeDirectory, HomeDirDrive, Profile, LoginScript, ObjectSID
Integer: UserFlags, PasswordExpired, PrimaryGroupID
Time: PasswordAge
You'll find more in Microsoft documentation.
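For example, here is a small sketch of pulling a couple of those string properties for each member of the group above, using the same reflection call as for Name (property availability varies by member type, so some values may come back empty or throw for built-in accounts):
$admingroup.Invoke('Members') | ForEach-Object {
    $member = $_
    # Name and Description are read via GetProperty, just like Name in the snippet above
    $name = $member.GetType().InvokeMember('Name', 'GetProperty', $null, $member, $null)
    $desc = $member.GetType().InvokeMember('Description', 'GetProperty', $null, $member, $null)
    "$name : $desc"
}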
Try this
$computer = [ADSI]"WinNT://$env:COMPUTERNAME"
$computer.psbase.children | where { $_.psbase.schemaClassName -eq 'group' } | foreach {
write-host $_.name
write-host "------"
$group =[ADSI]$_.psbase.Path
$group.psbase.Invoke("Members") | foreach {$_.GetType().InvokeMember("Name", 'GetProperty', $null, $_, $null)}
write-host
}
This doesn't give the domain though, hence I had to look for other ways, like:
If you want to see members of a local group quickly:
PS C:\> net localgroup USERS
Alias name USERS
Comment Users are prevented from making accidental or intentional system-wide changes and can run most applications
Members
-------------------------------------------------------------------------------
NT AUTHORITY\Authenticated Users
NT AUTHORITY\INTERACTIVE
The command completed successfully.
Now you can manipulate this output a bit to get what you need:
$computer = [ADSI]"WinNT://$env:COMPUTERNAME"
$groups = $computer.psbase.children | where { $_.psbase.schemaClassName -eq 'group' } | select -ExpandProperty Name
Foreach($group in $groups)
{
write-host $group
write-host "------"
net localgroup $group | where {$_ -notmatch "command completed successfully"} | select -skip 6
Write-host
}
I have an ODBC connection set up on my Windows Server 2008 machine, and I'm trying to replace some .BAT files, which do some processing, with PowerShell scripts.
Is there a way to do the same thing as this in PowerShell?
CALL osql /instanceName /Uuser /Ppassword /Q"EXECUTE storedProcName @Parm1= %ePROFILE%, @param2 = N'%eValList%'"
SQL Server 2008 provides a PowerShell cmdlet called Invoke-Sqlcmd that does the same type of thing as osql from PowerShell. That said, if you want to continue to use osql, you should be able to do something like this and keep using your Windows environment variables:
osql /instanceName /Uuser /Ppassword /Q"EXECUTE storedProcName @Parm1= $env:ePROFILE, @param2 = N'$env:eValList'"
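For the Invoke-Sqlcmd route, a rough equivalent would look something like this (a sketch only: it assumes the SQL Server 2008 snap-in that provides Invoke-Sqlcmd is loaded, and it reuses the placeholder instance name, credentials, and environment variables from the osql call above):
# Runs the stored procedure via Invoke-Sqlcmd instead of osql
Invoke-Sqlcmd -ServerInstance "instanceName" -Username "user" -Password "password" `
    -Query "EXECUTE storedProcName @Parm1 = $env:ePROFILE, @param2 = N'$env:eValList'"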
If you want an actual PowerShell object to work with after you query a database, you can use a function like this that I recently wrote:
function Query-DatabaseTable ( [string] $server , [string] $dbs, [string] $sql )
{
$Columns = @()
$con = "server=$server;Integrated Security=true;Initial Catalog=$dbs"
$ds = new-object "System.Data.DataSet" "DataSet"
$da = new-object "System.Data.SqlClient.SqlDataAdapter" ($con)
$da.SelectCommand.CommandText = $sql
$da.SelectCommand.Connection = $con
$da.Fill($ds) | out-null
$ds.Tables[0].Columns | Select ColumnName | % { $Columns += $_.ColumnName }
$res = $ds.Tables[0].Rows | Select $Columns
$da.Dispose()
$ds.Dispose()
return $res
}
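Hypothetical usage, with placeholder server, database, and query values:
# Query a table and display the resulting objects
$rows = Query-DatabaseTable -server "SQLSERVER01" -dbs "MyDatabase" -sql "SELECT TOP 10 * FROM dbo.SomeTable"
$rows | Format-Table -AutoSize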