Bulk Update HELP!
Apr 25, 2008
I have an unusual question... I have a table from which I have to select the top 300 records, check the date on each record, and then loop through each record with an If...Then statement, updating a field based on criteria.
The problem is that it takes so long to do this. Is there a way to populate an object and pass that object, with the data, to a SQL stored procedure so that the SQL server does all the updating, as opposed to the application updating record by record? Does that make sense? Here's a sample of the code and you'll see what I'm talking about.
Thanks in advance for any help or advice you can give.
Imports Microsoft.SqlServer.Server
Imports System.Data.SqlClient
Imports CallTracker_AgingCheck
Module CallTracker
Sub Main()
GetWebAgingData()
GetWebCallsData()
End Sub
#Region "Get Data"
Public agingStatusNumber As New ArrayList
Public agingEscalationNumber As New ArrayList
Public callStatusNumber As New ArrayList
Public CallStatus As New ArrayList
Public Sub GetWebCallsData()
Dim dt As New DataTable
Dim Criteria As String = "SELECT TOP 300 CallID, TIMEOFCALL, Status, StatusNumber " & _
"FROM WebCalls " & _
"WHERE (Status = 'OPEN') OR " & _
"(Status = 'IN PROCESS') OR " & _
"(Status = 'WORKORDER') OR " & _
"(Status = 'PRIORITY') OR " & _
"(Status = 'PENDING') " '& _
'"ORDER BY TimeOfCall DESC"
Dim Fa As String = String.Empty
Dim fromDATE As String = String.Empty
Dim toDate As String = String.Empty
Dim ds As New DataSet()
Dim tbl As String = "WebCalls"
Dim x1 As Integer = 0
Try
Using cn As New SqlClient.SqlConnection(Database.SQLConnection)
cn.Open()
Using cm As SqlClient.SqlCommand = cn.CreateCommand()
cm.CommandText = Criteria
cm.CommandType = CommandType.Text
cm.Parameters.AddWithValue("@CALLID", Fa)
cm.Parameters.AddWithValue("@TIMEOFCALL ", Fa)
cm.Parameters.AddWithValue("@STATUS", Fa)
cm.Parameters.AddWithValue("@STATUSNUMBER", Fa)
Dim Age As TimeSpan = Nothing
Dim _totalHours As Integer = Nothing
Dim EscalateTime As Integer = agingEscalationNumber.Count
Dim DAUpdateCmd As SqlCommand
Using da As New SqlDataAdapter(Criteria, cn)
da.Fill(dt)
Dim callID As Integer = Nothing
Dim statNO As Integer = Nothing
Dim toc As Integer = dt.Rows.Count
Dim x As Integer = 0
For x = 0 To toc - 1
Dim y As Date = dt.Rows(x).Item(1) 'gets the date from the current row in WebCalls for comparison with WebAging
Age = Today.Subtract(y)
_totalHours = Age.TotalHours
If _totalHours > 744 Then
Console.WriteLine("This record is older than 30 days")
Else
callID = dt.Rows(x).Item(0)
DAUpdateCmd = New SqlCommand("Update WebCalls SET STATUSNUMBER = @STATUSNUMBER where CALLID = @CALLID", da.SelectCommand.Connection)
DAUpdateCmd.Parameters.Add(New SqlParameter("@STATUSNUMBER", SqlDbType.Int))
DAUpdateCmd.Parameters("@STATUSNUMBER").SourceVersion = DataRowVersion.Current
DAUpdateCmd.Parameters("@STATUSNUMBER").SourceColumn = "STATUSNUMBER"
DAUpdateCmd.Parameters.Add(New SqlParameter("@CALLID", SqlDbType.Int))
DAUpdateCmd.Parameters("@CALLID").SourceVersion = DataRowVersion.Original
DAUpdateCmd.Parameters("@CALLID").SourceColumn = "CALLID"
da.UpdateCommand = DAUpdateCmd
da.Fill(ds, tbl)
Dim z As Integer = 0
Dim _stat0 As Integer = agingEscalationNumber(0)
Dim _stat1 As Integer = agingEscalationNumber(1)
Dim _stat2 As Integer = agingEscalationNumber(2)
Dim _stat3 As Integer = agingEscalationNumber(3)
Dim StatusNo_0 As Integer = agingStatusNumber(0)
Dim StatusNo_1 As Integer = agingStatusNumber(1)
Dim StatusNo_2 As Integer = agingStatusNumber(2)
Dim StatusNo_3 As Integer = agingStatusNumber(3)
If _totalHours <= _stat0 Then
Try
ds.Tables(tbl).Rows(x)("STATUSNUMBER") = StatusNo_0 'use the current row, not a hardcoded index
da.Update(ds, tbl)
Console.WriteLine("Status Number: " & Str(StatusNo_0) & " callID = " & callID)
Catch ex As Exception
Console.WriteLine(ex)
End Try
End If
If _totalHours >= (_stat0 + 1) And _totalHours <= _stat1 Then
Try
ds.Tables(tbl).Rows(x)("STATUSNUMBER") = StatusNo_1
da.Update(ds, tbl)
Console.WriteLine("Status Number: " & Str(StatusNo_1) & " callID = " & callID)
Catch ex As Exception
Console.WriteLine(ex)
End Try
End If
If _totalHours >= (_stat1 + 1) And _totalHours <= _stat2 Then
Try
ds.Tables(tbl).Rows(x)("STATUSNUMBER") = StatusNo_2
da.Update(ds, tbl)
Console.WriteLine("Status Number: " & Str(StatusNo_2) & " callID = " & callID)
Catch ex As Exception
Console.WriteLine(ex)
End Try
End If
If _totalHours >= (_stat2 + 1) And _totalHours <= _stat3 Then
Try
ds.Tables(tbl).Rows(x)("STATUSNUMBER") = StatusNo_3
da.Update(ds, tbl)
Console.WriteLine("Status Number: " & Str(StatusNo_3) & " callID = " & callID)
Catch ex As Exception
Console.WriteLine(ex)
End Try
Else
End If
End If
Next
End Using
End Using
End Using
Catch ex As Exception
Console.WriteLine(ex)
End Try
End Sub
Public Sub GetWebAgingData()
Dim dt As New DataTable
Dim Fa As String = String.Empty
Dim fromDATE As String = String.Empty
Dim toDate As String = String.Empty
Dim Criteria As String = "Select RecordID, StatusNumber, EscalationTime from WebAging" 'create a selection statement"
'don't forget the email addys for sending based on escalation rates.
Dim x1 As Integer = 0
Try
Using cn As New SqlClient.SqlConnection(Database.SQLConnection)
cn.Open()
Using cm As SqlClient.SqlCommand = cn.CreateCommand()
cm.CommandText = Criteria
cm.CommandType = CommandType.Text
cm.Parameters.AddWithValue("@RecordID", Fa)
cm.Parameters.AddWithValue("@StatusNumber ", Fa)
cm.Parameters.AddWithValue("@EscalationTime", Fa)
Using da As New SqlClient.SqlDataAdapter(cm)
da.Fill(dt)
Dim dtCnt As Integer = dt.Rows.Count
Dim x As Integer = Nothing
Dim escalateTime As Integer = Nothing
Dim _StatusNumber As Integer = Nothing
For x = 0 To dtCnt - 1
escalateTime = dt.Rows(x).Item(2)
_StatusNumber = dt.Rows(x).Item(1)
agingEscalationNumber.Add(escalateTime)
agingStatusNumber.Add(_StatusNumber)
Next x
End Using
End Using
End Using
Catch ex As Exception
Console.WriteLine(ex)
End Try
End Sub
#End Region
#Region "Get Status Information From WebCalls"
#End Region
End Module
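In case it is useful to later readers: since the escalation thresholds already live in WebAging, the whole loop can usually be collapsed into one set-based UPDATE that runs entirely on the server. This is only a sketch, assuming SQL Server 2005 or later and the WebCalls/WebAging columns shown above; it picks, for each open call, the StatusNumber of the smallest EscalationTime that still covers the call's age in hours, and skips anything older than 744 hours just like the loop does.
Code:
UPDATE c
SET c.StatusNumber = a.StatusNumber
FROM WebCalls AS c
CROSS APPLY (SELECT TOP 1 w.StatusNumber
             FROM WebAging AS w
             WHERE w.EscalationTime >= DATEDIFF(HOUR, c.TimeOfCall, GETDATE())
             ORDER BY w.EscalationTime) AS a
WHERE c.Status IN ('OPEN', 'IN PROCESS', 'WORKORDER', 'PRIORITY', 'PENDING')
  AND DATEDIFF(HOUR, c.TimeOfCall, GETDATE()) <= 744  -- same 31-day cutoff as the loop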
View 4 Replies
Sep 27, 2007
I have to update a field within a table of 60 records or so. Each record has a different field value, and the field is varchar. I was given an Excel file with the field values and was thinking of a bulk update along the lines of BULK INSERT, but I don't recall that being possible directly.
Is the only way to create a table, bulk insert into it, then merge the two tables together with an UPDATE?
Just wanted to see if there was an easier way to do it; otherwise I'll take the latter route. Thanks!
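For what it's worth, the staging route described above is usually short; this is only a sketch with placeholder names (TargetTable keyed on ID, the varchar field called SomeField, the Excel sheet saved as csv):
Code:
CREATE TABLE #Staging (ID int, SomeField varchar(100))

BULK INSERT #Staging
FROM 'C:\data\values.csv'
WITH (FIELDTERMINATOR = ',', ROWTERMINATOR = '\n')

UPDATE t
SET t.SomeField = s.SomeField
FROM TargetTable t
JOIN #Staging s ON s.ID = t.ID

DROP TABLE #Staging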
View 1 Replies
View Related
Feb 5, 2008
I am currently wrapping up a website upgrade for a client and I am working on a development server/database. The development server/database will become the live version. When the upgrade goes live, I will need to update that database with the latest data from specific datatables (not all of them) in the previously live database, but I don't know how to do a bulk refresh of datatables.
Problem: specific datatables (not all datatables) from Database1 need to be updated with the data from Database2. Database1 and Database2 are copies of each other with vast differences in some of the data.
Result: All of the current, up-to-date data needs to reside on Database1.
Solution: Any ideas?
I am using MSSQL 2000 and the databases reside on the same server.
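Since both databases sit on the same server, one hedged per-table pattern is a delete-and-reload inside a transaction; dbo.Orders and its columns are placeholders, and tables with identity columns would also need SET IDENTITY_INSERT ON around the insert:
Code:
BEGIN TRAN

DELETE FROM Database1.dbo.Orders

INSERT INTO Database1.dbo.Orders (OrderID, CustomerID, OrderDate)
SELECT OrderID, CustomerID, OrderDate
FROM Database2.dbo.Orders

COMMIT TRAN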
View 1 Replies
View Related
Mar 8, 2006
We are planning to add a new attribute to one of our tables to speed up data access. Once the attribute is added, we will need to populate that attribute for each of the records in the table. Since the table in question is very large, the update statement is taking a considerable amount of time. From reading through old posts and Books Online, it looks like one of the big things slowing down the update is writing to the transaction log. I have found mention of "truncate log on checkpoint", of using "SET ROWCOUNT" to limit the number of rows updated at once, and of "DUMP TRANSACTION databaseName WITH NO_LOG". Does anyone have any opinions on these tactics? Please let me know if you want more information about the situation in order to provide an answer!
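A minimal sketch of the SET ROWCOUNT batching idea mentioned above (SQL 2000 era), with placeholder names BigTable/NewAttribute; each batch commits on its own, so the log space can be reused between batches when the recovery model allows it:
Code:
SET ROWCOUNT 10000

WHILE 1 = 1
BEGIN
    UPDATE BigTable
    SET NewAttribute = 0        -- stand-in for the real population logic
    WHERE NewAttribute IS NULL

    IF @@ROWCOUNT = 0 BREAK
END

SET ROWCOUNT 0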
View 3 Replies
View Related
Aug 11, 2006
[EDIT #2]
Using this query:
Code:
INSERT INTO Users (userName, UserSalt, UserHash1, UserHash2, UT_memberID)
select memberFirstName + '.' + memberLastName + '56' as userName, '{AxxxxxDE-6xx6-4xxD-Bxx9-3xxxx79xxxxE}',
'{4xxxxxx6-8xx5-6xxD-Cxx6-4xxxFxxx1xx9}', '{0xxx8xxE-Cxx4-6xx8-ExxB-Dxxxx4xxx2xC}', members.memberID
From members
Inner Join groupLeaders ON members.memberID = groupLeaders.memberID
SELECT @@Identity AS UserID
How can I modify the portion that is inserting the '56' at the end of each username to do the following:
1) check to see if username already exists in the database (using a query with "LIKE %'")
2) if not, create the username "as-is" or how it should be without the number
3) if it already exists, get a count of the records matching the search criteria and make the new username = username + (count + 1).ToString();
Any thoughts... I am struggling to put these two pieces together.
Thanks,
Zoop
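A sketch of one way to fold that duplicate check into the INSERT above, assuming SQL Server 2005+ for CROSS APPLY and keeping the salt/hash literals as placeholders:
Code:
INSERT INTO Users (userName, UserSalt, UserHash1, UserHash2, UT_memberID)
SELECT base.userName
       + CASE WHEN dup.cnt = 0 THEN ''
              ELSE CAST(dup.cnt + 1 AS varchar(10)) END,
       '{salt}', '{hash1}', '{hash2}', base.memberID
FROM (SELECT m.memberID,
             m.memberFirstName + '.' + m.memberLastName AS userName
      FROM members m
      INNER JOIN groupLeaders g ON g.memberID = m.memberID) AS base
CROSS APPLY (SELECT COUNT(*) AS cnt
             FROM Users u
             WHERE u.userName LIKE base.userName + '%') AS dup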
[EDIT - original post below this]
I have modified my method to make this a bit easier. I added a memberID field to my [Users] table so that I can update my [Members] table in a separate statement after the insert takes place.
I have the following query, and it completes successfully in Query Analyzer (though I haven't actually executed the SP, just testing the syntax...). Anyway, here is what I have:
Code:
INSERT INTO Users (userName, UserSalt, UserHash1, UserHash2, UT_memberID)
select memberFirstName + '.' + memberLastName + '56' as userName, '{AxxxxxDE-6xx6-4xxD-Bxx9-3xxxx79xxxxE}',
'{4xxxxxx6-8xx5-6xxD-Cxx6-4xxxFxxx1xx9}', '{0xxx8xxE-Cxx4-6xx8-ExxB-Dxxxx4xxx2xC}', members.memberID
From members
Inner Join groupLeaders ON members.memberID = groupLeaders.memberID
SELECT @@Identity AS UserID
I am hoping this will create a user for all members whose 'memberID' can be found in the groupLeaders table... is this correct?
Also, notice the 56 being appended to the end of each username. I would like this to be a random number generated within a given range... can this be done? any advice?
Thanks,
Zoop
[Original post below - provide more background]
I have three tables involved with this insert/update:
[Members]
-memberID
-memberFirstName
-memberLastName
-UserID
[GroupLeaders]
-groupLeaderID
-memberID
[Users]
-UserID
-Username
-UserSalt
-UserHash1
-UserHash2
I want to insert into the [Users] table the memberFirstName.memberLastName + randomNum into the 'UserName' column from the [Members] table. Also, I want to make all passwords the same, in this case I know the Salt, Hash1, Hash2 I will be using and would like to pass these in for the 'UserHash1' 'UserHash2' fields.
Now, I only want to make this insert where the memberID is in the GroupLeaders table. and Finally, I need to Update my Members table with a UserID where the memberID matches the one used from the groupLeaders table.
Does anyone have any ideas on how I can accomplish this, even if it requires adding a temporary field to one of my tables... here is what I have so far, but I am receiving errors and can't quite figure this one out. (btw - I also don't know how to gen the rand num and was using the literal 23 as a placeholder.) Thanks...
Code:
INSERT INTO Users (userName, UserSalt, UserHash1, UserHash2)
select a.memberFirstName + '.' + a.memberLastName + '23' + as userName, '{AA99FCDE-6E06-437D-B9E9-3E3D27955C3E}',
'{7xxxxxx2-4xx6-9xx1-7xx9-4x3xx4Axxx59}', '{0xx8xxE-Cxx4-6xxx-xxxx-Fxx3xxxx3xxF}', b.memberID as newMemID
From members a, groupLeaders b
Where a.memberID = b.memberID
SELECT @@Identity AS UserID
Update Members Set UserID = Ident_Current('Users')
where memberID = newMemID
Any help is appreciated!
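For the Members.UserID back-fill, once the UT_memberID column from the edit above is in place, a joined UPDATE avoids relying on IDENT_CURRENT per row; sketch only:
Code:
UPDATE m
SET m.UserID = u.UserID
FROM Members m
JOIN Users u ON u.UT_memberID = m.memberID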
View 2 Replies
View Related
Apr 8, 2004
I need a fresh set of eyes.
On a daily basis I need to perform a bulk update. The table totals about 50,000 records, with approximately 5,000 changing (deletes, edits, and new records) per day. I'd like to push just the updates somehow, but VB is too slow and I haven't found a way to handle it in DTS. Not much experience w/ DTS.
I'm transfering between two SQL 2000 servers w/ a VB app sitting in the middle.
Any ideas?
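One hedged DTS-era approach: copy the day's source rows into a staging table on the destination server (DTS data pump or bcp), then apply the differences set-based. DestTable/Staging and their columns are placeholders keyed on ID:
Code:
-- changed rows only
UPDATE d
SET d.Col1 = s.Col1, d.Col2 = s.Col2
FROM DestTable d
JOIN Staging s ON s.ID = d.ID
WHERE BINARY_CHECKSUM(d.Col1, d.Col2) <> BINARY_CHECKSUM(s.Col1, s.Col2)

-- new rows
INSERT INTO DestTable (ID, Col1, Col2)
SELECT s.ID, s.Col1, s.Col2
FROM Staging s
WHERE NOT EXISTS (SELECT 1 FROM DestTable d WHERE d.ID = s.ID)

-- deleted rows (only if Staging holds the full current set)
DELETE d
FROM DestTable d
WHERE NOT EXISTS (SELECT 1 FROM Staging s WHERE s.ID = d.ID)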
View 1 Replies
View Related
Feb 13, 2008
Hello,
I am trying to read in from a csv file which works like this:
DECLARE @doesExist INT
DECLARE @fileName VARCHAR(200)
DECLARE @sql NVARCHAR(500)
SET @fileName = 'c:\file.csv'
SET NOCOUNT ON
EXEC master..xp_fileexist @fileName, @doesExist OUTPUT
SET NOCOUNT OFF
IF @doesExist = 1
BEGIN
-- BULK INSERT will not take a variable for the file name, so build the statement dynamically
SET @sql = 'BULK INSERT OrdersBulk FROM ''' + @fileName + ''' ' +
           'WITH (FIELDTERMINATOR = '','', ROWTERMINATOR = ''\n'')'
EXEC (@sql)
END
ELSE
PRINT 'Error: cannot find file'
What I want to do is check another table before each line inserts; if the data already exists I want to do an UPDATE instead.
I think I can do what I need with a cursor, but the bulk insert just pushes all the data up and will not let me put the cursor in.
So is there a way I can read the csv with a cursor instead of using the bulk insert, so I can examine each row?
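A hedged alternative to a cursor: bulk insert the csv into a staging table first, then update the matches and insert the rest in two set-based statements. OrdersStaging and the OrderID/Qty/Price columns below are assumptions, not the real schema:
Code:
BULK INSERT OrdersStaging
FROM 'c:\file.csv'
WITH (FIELDTERMINATOR = ',', ROWTERMINATOR = '\n')

UPDATE o
SET o.Qty = s.Qty, o.Price = s.Price
FROM OrdersBulk o
JOIN OrdersStaging s ON s.OrderID = o.OrderID

INSERT INTO OrdersBulk (OrderID, Qty, Price)
SELECT s.OrderID, s.Qty, s.Price
FROM OrdersStaging s
WHERE NOT EXISTS (SELECT 1 FROM OrdersBulk o WHERE o.OrderID = s.OrderID)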
View 5 Replies
View Related
Apr 17, 2007
We need to import data from flat/xml files into our database.
We plan to do so in bulk, as the amount of data is huge, 2GB+.
We need to do some validation checks in our code, and after that we create the insert queries.
We have identity columns that are used as foreign keys in child tables. The question is how I can write a bulk/batch insert statement that will propagate the identity column to the child tables, since for all the other columns we are creating the query in application memory.
There are 2 parent tables; the first table's value needs to be referred to in 7 tables and the second table's value in 6.
Thanks much for your help.
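If the server is SQL Server 2005 or later, the OUTPUT clause can capture the generated identity values together with a natural key from the staging data, so the child inserts can join back to them; everything below uses placeholder names:
Code:
DECLARE @map TABLE (ParentID int, SourceKey varchar(50))

INSERT INTO Parent (SourceKey, Col1)
OUTPUT inserted.ParentID, inserted.SourceKey INTO @map (ParentID, SourceKey)
SELECT SourceKey, Col1
FROM StagingParent

INSERT INTO Child (ParentID, Col2)
SELECT m.ParentID, s.Col2
FROM StagingChild s
JOIN @map m ON m.SourceKey = s.SourceKey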
View 1 Replies
View Related
Oct 30, 2015
I have Three tables Student,Daily_Attendance_Master and Daily_Attendence_Details.
I want to run SQL that inserts or updates student attendance (absent or present) in Daily_Attendence_Details, based on Daily_Attendance_Master_Id and Student_Id (from one roll number to another).
If both are present in Daily_Attendence_Details, I want to update the attendance from one roll number to another roll number in Daily_Attendence_Details on the basis of Daily_Attendence_Details_Id.
And if both (or either one) are not present, I want to insert the student attendance from one roll number to another roll number into Daily_Attendence_Details.
I give below the structure of three tables Student,Daily_Attendance_Master and Daily_Attendance_Details.
Student:-
CREATE TABLE [dbo].[Student](
[Student_Id] [bigint] IDENTITY(1,1) NOT NULL,
[Course_Id] [smallint] NULL,
[Class_Id] [int] NULL,
[Batch_Year] [varchar](20) NULL,
[Student_Initials] [varchar](20) NULL,
[Code] ....
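A hedged sketch using MERGE (SQL Server 2008+). The column names follow the post where they are given; Roll_No, Attendance_Status and the @ variables are assumptions standing in for the real inputs:
Code:
-- @MasterId, @FromRoll, @ToRoll and @Status are the inputs for the run
MERGE Daily_Attendence_Details AS d
USING (SELECT @MasterId AS Master_Id, s.Student_Id, @Status AS Att_Status
       FROM Student s
       WHERE s.Roll_No BETWEEN @FromRoll AND @ToRoll) AS src
ON d.Daily_Attendance_Master_Id = src.Master_Id
   AND d.Student_Id = src.Student_Id
WHEN MATCHED THEN
    UPDATE SET d.Attendance_Status = src.Att_Status
WHEN NOT MATCHED THEN
    INSERT (Daily_Attendance_Master_Id, Student_Id, Attendance_Status)
    VALUES (src.Master_Id, src.Student_Id, src.Att_Status);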
View 13 Replies
View Related
Feb 15, 2008
(Hope this isn't a "stupid" question, but I haven't been able to find a straightforward answer anywhere.)
I currently have code that iterates through a dataview's records, making a change to a field in some of the records. The way I have this coded, a connection has to be opened and closed for each individual record that's updated:
dsrcUserIae.UpdateCommand = "UPDATE UserIAE SET blnCorrect = @blnCorrect WHERE (ID = @ID)"
dsrcUserIae.UpdateParameters.Add("blnCorrect", SqlDbType.Bit)
dsrcUserIae.UpdateParameters.Add("ID", SqlDbType.Int)

Dim myDataView As DataView = CType(dsrcUserIae.Select(DataSourceSelectArguments.Empty), DataView)
For Each myRow As DataRowView In myDataView
    If myRow("FkUsersAnswerID") = myRow("AnswerID") Then
        intCorrect = 1
    Else
        intCorrect = 0
    End If
    dsrcUserIae.UpdateParameters.Item("blnCorrect").DefaultValue = intCorrect
    dsrcUserIae.UpdateParameters.Item("ID").DefaultValue = myRow("ID")
    intUpdateResult = dsrcUserIae.Update()
Next
It seems like I should be able to do something like this (call update once), but I'm not sure how...
dsrcUserIae.UpdateCommand = "UPDATE UserIAE SET blnCorrect = @blnCorrect WHERE (ID = @ID)"
dsrcUserIae.UpdateParameters.Add("blnCorrect", SqlDbType.Bit)
dsrcUserIae.UpdateParameters.Add("ID", SqlDbType.Int)

Dim myDataView As DataView = CType(dsrcUserIae.Select(DataSourceSelectArguments.Empty), DataView)
For Each myRow As DataRowView In myDataView
    If myRow("FkUsersAnswerID") = myRow("AnswerID") Then
        myRow("blnCorrect") = 1
    Else
        myRow("blnCorrect") = False
    End If
Next

intUpdateResult = dsrcUserIae.Update() 'Want all changed myRow("blnCorrect") to be updated to datasource
Can anybody explain how to do the bulk update? I've seen some info about AcceptChanges and Merge, but I'm not sure if they apply here, or if they more for Transactions.
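Since blnCorrect here is derived entirely from other columns in the same row, one hedged option is to skip the client-side loop completely and let a single set-based statement do it (column names taken from the code above):
Code:
UPDATE UserIAE
SET blnCorrect = CASE WHEN FkUsersAnswerID = AnswerID THEN 1 ELSE 0 END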
View 1 Replies
View Related
Feb 8, 2004
I have to modify the structure of a table that already holds a lot of data. The log is getting full due to uncommitted transactions: a lot of data is being updated in large bulks, not all of the transactions are committed, and the update task cannot be completed.
However, there is no more spare disk space for it to commit the transaction. Can anyone help?
View 2 Replies
View Related
Mar 16, 2015
I am trying my first bulk update to an existing SQL table from a CSV text file. The text file naming is exactly the same as the SQL table, with the same attributes.
The statements:
BULK INSERT [Jedox_prod].[dbo].[B_BP_Customer]
FROM 'c:Baanjedox_dailyjdcom4401.txt'
WITH
[code]....
The error message is:
Msg 4864, Level 16, State 1, Line 1
Bulk load data conversion error (type mismatch or invalid character for the specified codepage) for row 2, column 3 (BP_Country).
Msg 7399, Level 16, State 1, Line 1
The OLE DB provider "BULK" for linked server "(null)" reported an error. The provider did not give any information about the error.
Msg 7330, Level 16, State 2, Line 1
Cannot fetch a row from OLE DB provider "BULK" for linked server "(null)". I have checked and re-checked the BP_Country field (the 1st field after the key) and I am not seeing any mismatches.
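One hedged way to find the offending value is to load the file into an all-varchar staging table first, so row 2 / column 3 can be inspected before any conversion; the column list and terminators below are placeholders that would need to match the real file:
Code:
CREATE TABLE dbo.B_BP_Customer_stage
(
    c1 varchar(255), c2 varchar(255), c3 varchar(255)  -- one varchar column per field in the file
)

BULK INSERT dbo.B_BP_Customer_stage
FROM 'c:\path\jdcom4401.txt'   -- same file as above, path shortened here
WITH (FIELDTERMINATOR = ',', ROWTERMINATOR = '\n')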
View 5 Replies
View Related
Oct 16, 2015
I'm having issues with bulk updates in SQL Server. I'm using SAP BODS as the ETL tool and have some 20,000 updates. The target table has approx 0.5 million records and a clustered index on the id column. I have selected the upsert option in BODS. The same setup is also done for Sybase IQ; IQ has a bulk update option which is giving very good performance.
In IQ the same update load finishes in some 9 minutes, where SQL Server is taking more than 2 hours for the same, which doesn't seem right. When I look into it, the update is causing the whole package to go slow. Sybase generates a query along the lines of "if the ID is present then update, else insert". Is there any way to make bulk updates work faster in the SQL Server environment?
View 2 Replies
View Related
Jul 22, 2015
I have come up with an issue where I want to update data in a table using a bulk/SET-based update to get the result shown in the code below, with the output in the column titled [ArrearAmt].
Please use this test data.
CREATE TABLE ##vOD_Calc
(
Seq_No INT ,
Contract_id INT ,
Rental_id INT ,
Actual_OD INT ,
Logic_OD INT ,
Due_dte DATETIME ,
[Code] .....
The logic required is that once the sum of column [ArrearAmt] for the current row and all previous rows becomes greater than $100, column [ChArrrearAmt] should show that summed-up value; otherwise column [ChArrrearAmt] should show the same value as column [ArrearAmt].
Once column [ChArrrearAmt] reaches the threshold of $100, the same cycle should start again. In the example above, rental #1 had $37.17 < $100, rental #1 + rental #2 is also < $100, and at rental #3 the sum of rentals #1, #2 and #3 becomes $111.51, which is greater than $100, so that value is written to column [ChArrrearAmt]. The cycle then starts over from rental #4 onwards; the summation of [ArrearAmt] now begins at rental #4 and not from the start.
Below is the loop-based SQL script which handles this situation; however, in bulk it is a total deterioration of performance if thousands of rows are to be processed, i.e. a contract having multiple rentals.
The catch here is that I have to use the previously updated value of column [ChArrrearAmt] to take the decision for the next row; with a bulk update, since that row is not yet updated with the latest amount, the decision on the next row also gives the wrong result.
This is the code with which I have managed to update the column [ChArrrearAmt]; however, it is a loop-based solution and a performance killer.
INSERT INTO ##vOD_Calc_loop
( Rows_count ,
contract_id
)
SELECT COUNT(*) ,
T.Contract_id
FROM ##vOD_Calc T
GROUP BY T.Contract_id
[Code] ....
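A hedged set-based alternative to the loop is a recursive CTE, so each rental can see the carried-over total of the previous one. It assumes Seq_No runs 1, 2, 3, ... consecutively within each Contract_id (otherwise derive such a number with ROW_NUMBER() over Due_dte first) and uses the [ArrearAmt]/[ChArrrearAmt] column names from the description:
Code:
;WITH calc AS
(
    -- the first rental of every contract starts its own accumulation
    SELECT Contract_id, Seq_No, ArrearAmt,
           CAST(ArrearAmt AS money) AS ChArrearAmt,
           CAST(CASE WHEN ArrearAmt > 100 THEN 0 ELSE ArrearAmt END AS money) AS Carry
    FROM ##vOD_Calc
    WHERE Seq_No = 1

    UNION ALL

    -- every later rental keeps accumulating, or restarts after the threshold was crossed
    SELECT c.Contract_id, c.Seq_No, c.ArrearAmt,
           CAST(CASE WHEN p.Carry + c.ArrearAmt > 100
                     THEN p.Carry + c.ArrearAmt
                     ELSE c.ArrearAmt END AS money),
           CAST(CASE WHEN p.Carry + c.ArrearAmt > 100
                     THEN 0
                     ELSE p.Carry + c.ArrearAmt END AS money)
    FROM ##vOD_Calc c
    JOIN calc p ON p.Contract_id = c.Contract_id
               AND c.Seq_No = p.Seq_No + 1
)
UPDATE t
SET t.ChArrrearAmt = calc.ChArrearAmt
FROM ##vOD_Calc t
JOIN calc ON calc.Contract_id = t.Contract_id
         AND calc.Seq_No = t.Seq_No
OPTION (MAXRECURSION 0);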
View 4 Replies
View Related
Nov 18, 2015
I have a fundamental problem with how CDC works for bulk updates. When a CDC-enabled table is updated one row at a time, the CDC system tables record it as an update (operations 3 & 4), which is perfect and what it should be. No complaints! But when I do a bulk update on the same CDC-enabled tables for the same columns, the CDC system tables record it as a delete and then an insert (operations 1 & 2). This is not correct, and this is what my problem is. We used triggers before CDC and did not face this problem; everything was fine with triggers other than performance. The way CDC handles the bulk update is a big problem for me, because based on the output of the CDC system tables we are doing migration work to a legacy system.
It would be impossible for me to go and change my migration logic scripts because we have hundreds of procedures in them. Is this a known problem with CDC? Is there any solution so that when a bulk update happens on a table, the CDC system tables record it as updates? I don't think CDC 'net changes' helps in this situation, because the net change would show as a single inserted row. If this can't be done with CDC then I have to completely abandon CDC and go back to triggers.
View 5 Replies
View Related
Jun 9, 2015
I am getting the below error message while performing Bulk Insert/Update operation.
Could not allocate space for object 'dbo.pros_mas_det'.'PK__pros_mas__3213E83F22401542' in database 'admin_mbjobslive' because the 'PRIMARY' filegroup is full. Create disk space by deleting unneeded files, dropping objects in the filegroup, adding additional files to the filegroup, or setting autogrowth on for existing files in the filegroup.
My Current SQL Server version :
Microsoft SQL Server 2008 R2 (RTM) - 10.50.1600.1 (X64) Apr 2 2010 15:48:46 Copyright (c) Microsoft Corporation Express Edition with Advanced Services (64-bit) on Windows NT 6.1 <X64> (Build 7601: Service Pack 1) (Hypervisor)
My current database size has crossed the 10 GB size limit.
View 4 Replies
View Related
Apr 21, 2015
I am currently working with C and SQL Server 2012. My requirement is to bulk fetch records and insert/update them in another table with some business logic. How do I do this?
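With SQL Server 2012 one hedged option is a table-valued parameter: the client sends the whole batch to a stored procedure in one call and the procedure applies the business logic set-based. The type, table and column names below are placeholders:
Code:
CREATE TYPE dbo.ItemBatch AS TABLE
(
    ItemID int PRIMARY KEY,
    Amount money
)
GO
CREATE PROCEDURE dbo.UpsertItems
    @Batch dbo.ItemBatch READONLY
AS
BEGIN
    SET NOCOUNT ON;

    MERGE dbo.Items AS t
    USING @Batch AS s ON s.ItemID = t.ItemID
    WHEN MATCHED THEN
        UPDATE SET t.Amount = s.Amount
    WHEN NOT MATCHED THEN
        INSERT (ItemID, Amount) VALUES (s.ItemID, s.Amount);
END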
View 14 Replies
View Related
Nov 14, 2007
I have a web form with a text field that needs to take in as much as the user decides to type and insert it into an nvarchar(max) field in the database behind it. I've tried using the new .WRITE() method in my update statement, but it cuts off the text after a while. Is there a way to insert/update this in SQL 2005 without resorting to BULK INSERT? It bloats the transaction log, and turning the logging off requires a call to sp_dboption (or a straight-up ALTER DATABASE), which I'd like to avoid if I can.
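For reference, a hedged sketch of the chunked-append pattern with .WRITE: a NULL offset appends to the end of the existing value and only the appended fragment is logged. The table, column and parameter names are placeholders, and the column must already hold a non-NULL value (start it with an empty string) because .WRITE cannot be applied to NULL:
Code:
-- @chunk and @id come from the application, one call per chunk of text
UPDATE dbo.Submissions
SET BigText.WRITE(@chunk, NULL, NULL)
WHERE SubmissionID = @id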
View 6 Replies
View Related
Jan 17, 2008
I'm having some issues with bulk insert.
This is the table:
CREATE TABLE [dbo].[tmp_GA_status](
[GA_recno] [int] NOT NULL,
[GA_desc] [varchar](40) NULL
)
This is the file (unicode):
1|"test1"
2|"test2"
3|"test3"
4|"test4"
5|"test5"
6|"test6"
7|"test7"
8|"test8"
and this is the sql:
bulk insert tmp_GA_status from 'C: empTextDumpGA_status.dta'
with (CODEPAGE='RAW', FIELDTERMINATOR='|', ROWTERMINATOR='\n', DATAFILETYPE='widechar')
so yeah, pretty simple. But whatever I do I get this;
Msg 4864, Level 16, State 1, Line 1
Bulk load data conversion error (type mismatch or invalid character for the specified codepage) for row 1, column 2 (GA_desc).
So what am I doing wrong ?
View 13 Replies
View Related
Oct 11, 2000
I have a table containing 8 million records.
I need to replace 2 million of these records with
a scaled down query that goes something like:
SELECT 1, ShareholderID, Assets1
FROM MyTable (Yields appx. 200,000 records)
SELECT 2, ShareholderID, Assets2
FROM MyTable (Yields appx. 200,000 records)
.
.
.
SELECT 10, ShareholderID, Assets1 + Assets2 + Assets3 + ... + Assets9
FROM MyTable (Yields appx. 200,000 records)
Updates and cursors just seem to be too slow.
So far I have done the following, but was wondering if anyone could think of a better way.
SELECT 6 million records that don't need to be deleted into a #TempTable
Use statements above to select into same #TempTable
DROP and recreate Original Table
SELECT 6 + 2 million records INTO original table.
This seems rather convoluted. Is there a better approach? Would it be worthwhile to dump the data to a file and use bcp / BULK INSERT?
Any comments are appreciated,
-Marc
View 3 Replies
View Related
Jun 29, 2015
I'm trying to use BULK INSERT for the first time and getting the following error. I think it might have something to do with my format file, and from the error message there's a conversion error for the first column. In my database the field is nvarchar(6), so my best guess is to use SQLNCHAR for the first column. I've checked that the end of each line is CR LF, so the terminator for line 7 should be correct, right?
Msg 4863, Level 16, State 1, Line 1
Bulk load data conversion error (truncation) for row 1, column 1 (ASXCode).
Msg 7399, Level 16, State 1, Line 1
The OLE DB provider "BULK" for linked server "(null)" reported an error. The provider did not give any information about the error.
Msg 7330, Level 16, State 2, Line 1
Cannot fetch a row from OLE DB provider "BULK" for linked server "(null)".
BULK
INSERTtbl_ASX_Data_temp
FROM
'M:DataASXImportTest.txt'
WITH
(FORMATFILE='M:DataASXSQLFormatImport.Fmt')
[code]...
View 5 Replies
View Related
Feb 1, 2007
Hi~,
Before implementing memory-based bulk copy insert with the IRowsetFastLoad interface of the SQL Server 2005 OLE DB provider, I want to know some considerations.
- performance: compared with T-SQL's "BULK INSERT ..." and the bcp utility
- SQL Server's resource usage: how much the server's resources are affected while a memory-based bulk copy is running
- server-side behavior: when the server is busy, does delayed update mean IRowsetFastLoad::Commit(true) can insert right after?
- row count: the limit on the number of rows that can be inserted with IRowsetFastLoad::InsertRow() before IRowsetFastLoad::Commit
- any other guidelines
View 1 Replies
View Related
Apr 18, 2008
Hello,
I'm just learning SSIS and I've hit my first bump. I am doing a bulk import from a tab-delimited text file into an empty SQL table that has an identity column defined. How do I tell the Bulk Insert task to skip that column when inserting from the text file? If I remove the identity column it imports the data fine, but I want to keep the identity column in the table too.
Thanks.
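One hedged workaround sometimes used when the file has no identity values: bulk load through a view that leaves the identity column out, so the engine generates the IDs itself; the view can usually be chosen as the destination of the Bulk Insert Task just like a table. Names and terminators below are placeholders:
Code:
CREATE VIEW dbo.v_ImportTarget
AS
SELECT Col1, Col2          -- every column except the identity column
FROM dbo.ImportTarget
GO
BULK INSERT dbo.v_ImportTarget
FROM 'C:\data\import.txt'
WITH (FIELDTERMINATOR = '\t', ROWTERMINATOR = '\n')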
View 8 Replies
View Related
Feb 15, 2007
Hi~, I have 3 questions about memory based bulk copy.
1. What is the limit on the number of IRowsetFastLoad::InsertRow() calls before IRowsetFastLoad::Commit(true)?
For example, how many rows can be inserted in the sample below (i.e. the max value of nCount)?
for(i=0 ; i<nCount ; i++)
{
pIFastLoad->InsertRow(hAccessor, (void*)(&BulkData));
}
2. In the above code sample, is there a method for inserting a prepared array at once directly (the BulkData array, without the for loop)?
3. In OLE DB memory-based bulk copy, what is the equivalent of the T-SQL bulk copy options below?
BULK INSERT database_name.schema_name.table_name FROM 'data_file' WITH (ROWS_PER_BATCH = rows_per_batch, TABLOCK);
-------------------------------------------------------
My solution is like this. Is it correct?
// CoCreateInstance(...);
// Data source
// Create session
m_TableID.uName.pwszName = m_wszTableName;
m_TableID.eKind = DBKIND_NAME;
DBPROP rgProps[1];
DBPROPSET PropSet[1];
rgProps[0].dwOptions = DBPROPOPTIONS_REQUIRED;
rgProps[0].colid = DB_NULLID;
rgProps[0].vValue.vt = VT_BSTR;
rgProps[0].dwPropertyID = SSPROP_FASTLOADOPTIONS;
rgProps[0].vValue.bstrVal = L"ROWS_PER_BATCH = 10000,TABLOCK";
PropSet[0].rgProperties = rgProps;
PropSet[0].cProperties = 1;
PropSet[0].guidPropertySet = DBPROPSET_SQLSERVERROWSET;
if(m_pIOpenRowset)
{
if(FAILED(m_pIOpenRowset->OpenRowset(NULL,&m_TableID,NULL,IID_IRowsetFastLoad,1,PropSet,(LPUNKNOWN*)&m_pIRowsetFastLoad)))
{
return FALSE;
}
}
else
{
return FALSE;
}
View 6 Replies
View Related
Nov 2, 2007
Does anyone know how to do a bulk insert using just the Script Task? I've been searching everywhere but can't seem to find a sample.
View 6 Replies
View Related
Apr 8, 2008
I receive the following error message when I try to use the Bulk Insert Task to load BCP data into a table:
Error: 0xC002F304 at Bulk Insert Task, Bulk Insert Task: An error occurred with the following error message: "Cannot fetch a row from OLE DB provider "BULK" for linked server "(null)".The OLE DB provider "BULK" for linked server "(null)" reported an error. The provider did not give any information about the error.The bulk load failed. The column is too long in the data file for row 1, column 4. Verify that the field terminator and row terminator are specified correctly.Bulk load data conversion error (overflow) for row 1, column 1 (rowno).".
Task failed: Bulk Insert Task
In SSMS I am able to issue the following command and the data loads into a TableName table with no error messages:
BULK INSERT TableName
FROM 'C:DataDbTableName.bcp'
WITH (DATAFILETYPE='widenative');
What configuration is required for the Bulk Insert Task in SSIS to make the data load? BTW - the TableName.bcp file is a bulk copy file in bcp widenative format. The properties of the Bulk Insert Task are the following:
DataFileType: DTSBulkInsert_DataFileType_WideNative
RowTerminator: {CR}{LF}
Any help getting the bcp file to load would be appreciated. Let me know if you require any other information, thanks for all your help.
Paul
View 1 Replies
View Related
Jan 2, 2008
What's up with this?
This takes like 0 secs to complete:
update xxx_TableName_xxx
set d_50 = 'DE',modify_timestamp = getdate(),modified_by = 1159
where enc_id in
('C24E6640-D2CC-45C6-8C74-74F6466FA262',
'762E6B26-AE4A-4FDB-A6FB-77B4782566C3',
'D7FBD152-F7AE-449C-A875-C85B5F6BB462')
but From linked server this takes 8 minutes????!!!??!:
update [xxx_servername_xxxx].xxx_DatabaseName_xxx.dbo.xxx_TableName_xxx
set d_50 = 'DE',modify_timestamp = getdate(),modified_by = 1159
where enc_id in
('C24E6640-D2CC-45C6-8C74-74F6466FA262',
'762E6B26-AE4A-4FDB-A6FB-77B4782566C3',
'D7FBD152-F7AE-449C-A875-C85B5F6BB462')
What settings or whatever would cause this to take so much longer from the linked server?
Edit:
Note) Other queries from the linked server do not have this behavior. From the stored procedure where we have examined how long each query/update takes, this particular query is the culprit for the time eating. I thought it had to do specifically with this table; however, as stated, when a query window is opened directly on that server the update takes no time at all.
2nd Edit:
Could it be to do with this linked server setting?
Collation Compatible
right now it is set to false? I also asked this question in a message below, but figured I should put it up here.
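One hedged thing to try, assuming SQL Server 2005+ and RPC Out enabled on the linked server: run the statement on the remote server itself with EXEC ... AT, which often avoids the slow remote-query plan for the update:
Code:
EXEC ('update xxx_DatabaseName_xxx.dbo.xxx_TableName_xxx
       set d_50 = ''DE'', modify_timestamp = getdate(), modified_by = 1159
       where enc_id in (''C24E6640-D2CC-45C6-8C74-74F6466FA262'',
                        ''762E6B26-AE4A-4FDB-A6FB-77B4782566C3'',
                        ''D7FBD152-F7AE-449C-A875-C85B5F6BB462'')') AT [xxx_servername_xxxx]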
View 5 Replies
View Related
Nov 11, 2007
I am hoping someone can shed light on this odd behavior I am seeing when running a simple UPDATE statement on a table in SQL Server 2000. I have 2 tables - call them Table1 and Table2 for now (among many) - that need to have certain columns updated as part of a single transaction process. Each of the tables has many columns. I have purposely limited the target column for updating to only ONE of the columns, to try to isolate the issue. In one case the UPDATE runs fine against Table1 - at runtime in code and as a manual query when run in Query Analyzer or in the query window of SQL Server Management Studio - either way it works fine.
However, when I run the UPDATE statement against Table2, at runtime I get rowsAffected = 0, which of course forces the code to throw an exception (logically). When I take out the SQL statement and run it manually in Query Analyzer, it runs, BUT this is the output seen in the results pane...
(0 row(s) affected)
(1 row(s) affected)
How does one get 2 answers for one query like this? I have never seen such behavior and it is a real frustration... it makes no sense. There is only ONE row in the table that contains the key field passed in, and it is the same key field value as on the other table, Table1, where the SQL returns only ONE message (the one you expect):
(1 row(s) affected)
If anyone has any ideas where to look next, I'd appreciate it.
Thanks
View 2 Replies
View Related
Feb 16, 2006
Hi SQL fans,
I realized that I often encounter the same situation in a relational database context, where I really don't know what to do. Here is an example, where I have 2 tables as follows:

Portfolio
- folio_id (int, PK)
- folio_name (varchar)

PortfolioTitle
- tfolio_id (int)
- tfolio_idfolio (int, FK to Portfolio.folio_id)
- tfolio_idtitle (int, FK to Titles)
- tfolio_weight (decimal(6,5))

Note that I also have a "Titles" table (hence the tfolio_idtitle link).
My problem is: when I update a portfolio, I must update all the associated titles in it. That means that titles can either be removed from the portfolio (the folio does not support the title anymore), added to it (a new title is supported by the folio), or simply updated (a title stays in the portfolio but has its weight changed).
For example, if portfolio #2 contained:

[ PortfolioTitle ]
id | idFolio | idTitre | poids
1 | 2 | 1 | 10
2 | 2 | 2 | 20
3 | 2 | 3 | 30

and I must update PortfolioTitle based on these values:

idFolio | idTitre | poids
2 | 2 | 20
2 | 3 | 35
2 | 4 | 40

then I should
1) remove title #1 from the folio by deleting its entry in the PortfolioTitle table
2) update title #3 (weight from 30 to 35)
3) add title #4 to the folio
For now, the only way I've found to do this is to delete all the entries of the related folio (e.g. DELETE TitrePortefeuille WHERE idFolio = 2) and then insert new values for each entry based on the new given values.
Is there a way to better manage this by detecting which value has to be inserted/updated/deleted?
And this applies to many situations :( If you need other examples, I can give you some.
Thanks a lot!
ibiza
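A hedged three-statement sync against a staging copy of the new contents (a temp table #NewTitles with idTitre/poids is assumed here, along with @folioId for the portfolio being updated; the PortfolioTitle column names come from the schema above):
Code:
-- remove titles no longer in the portfolio
DELETE pt
FROM PortfolioTitle pt
WHERE pt.tfolio_idfolio = @folioId
  AND NOT EXISTS (SELECT 1 FROM #NewTitles n WHERE n.idTitre = pt.tfolio_idtitle)

-- update weights for titles that stay
UPDATE pt
SET pt.tfolio_weight = n.poids
FROM PortfolioTitle pt
JOIN #NewTitles n ON n.idTitre = pt.tfolio_idtitle
WHERE pt.tfolio_idfolio = @folioId

-- add newly supported titles
INSERT INTO PortfolioTitle (tfolio_idfolio, tfolio_idtitle, tfolio_weight)
SELECT @folioId, n.idTitre, n.poids
FROM #NewTitles n
WHERE NOT EXISTS (SELECT 1 FROM PortfolioTitle pt
                  WHERE pt.tfolio_idfolio = @folioId
                    AND pt.tfolio_idtitle = n.idTitre)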
View 8 Replies
View Related
Oct 20, 2006
The following code is not working anymore (500 error).
Set objRS = strSQL1.Execute
strSQL1 = "SELECT * FROM BannerRotor where BannerID=" & cstr(BannerID)
objRS.Open strSQL1, objConn , 2 , 3 , adCmdText
If not (objRS.BOF and objRS.EOF) Then
objRS.Fields("Exposures").Value =objRS.Fields("Exposures").Value + 1
objRS.update
End If
objRS.Close
The .execute Method works fine
strSQL1 = "UPDATE BannerRotor SET Exposures=Exposures+1 WHERE BannerID=" & cstr(BannerID)
objConn.Execute strSQL1
W2003 + IIS6.0
Pls advice?
View 1 Replies
View Related
Apr 16, 2015
If I have a table with 1 or more nullable fields, and I want to make sure that when an INSERT or UPDATE occurs and one or more of these fields are left NULL, either explicitly or implicitly, is there a way I can set these to non-null values without interfering with the INSERT or UPDATE as far as the other fields in the table are concerned?
EXAMPLE:
CREATE TABLE dbo.MYTABLE(
ID NUMERIC(18,0) IDENTITY(1,1) NOT NULL,
FirstName VARCHAR(50) NULL,
LastName VARCHAR(50) NULL,
[Code] ....
If an INSERT looks like any of the following, what can I do to change the NULL being assigned to DateAdded to a real date, preferably the value of GetDate() at the time of the insert? I've heard of INSTEAD OF triggers, but I'm not trying to override the entire INSERT or UPDATE, just the one (maybe 2) fields that are being left null or explicitly set to NULL. The same would apply for any UPDATE where DateModified is not specified or is explicitly set to NULL; I would want to change it so that DateModified is not null on any UPDATE.
INSERT INTO dbo.MYTABLE( FirstName, LastName, DateAdded)
VALUES('John','Smith',NULL)
INSERT INTO dbo.MYTABLE( FirstName, LastName)
VALUES('John','Smith')
INSERT INTO dbo.MYTABLE( FirstName, LastName, DateAdded)
SELECT FirstName, LastName, NULL
FROM MYOTHERTABLE
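One hedged way to cover both the implicit and the explicit NULL cases without rewriting the whole INSERT/UPDATE is an AFTER trigger that back-fills only those audit columns (DateModified is assumed to exist alongside DateAdded, as described above):
Code:
CREATE TRIGGER dbo.trg_MYTABLE_audit
ON dbo.MYTABLE
AFTER INSERT, UPDATE
AS
BEGIN
    SET NOCOUNT ON;

    UPDATE t
    SET DateAdded    = COALESCE(t.DateAdded, GETDATE()),
        DateModified = COALESCE(t.DateModified, GETDATE())
    FROM dbo.MYTABLE t
    JOIN inserted i ON i.ID = t.ID
    WHERE t.DateAdded IS NULL OR t.DateModified IS NULL;  -- touch only rows that need the back-fill
END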
View 9 Replies
View Related
May 27, 2008
Hi, I need help sending an email from Database Mail on row update, from a stored procedure that updates multiple rows at once.
I need to send a personal email, so every employee gets their own email when their row is updated - one email sent after another.
I am using a split FUNCTION I found on this forum to handle the multi-row update.
How do I loop so that for every updated row a single email is sent to that employee ID?
I update like this:
Code Snippet
:
DECLARE @id nvarchar(1000)
set @id= '16703, 16704, 16757, 16924, 17041, 17077, 17084, 17103, 17129, 17134, 17186, 17190, 17203, 17205, 17289, 17294, 17295, 17296, 17309, 17316, 17317, 17322, 17325, 17337, 17338, 17339, 17348, 17349, 17350, 17357, 17360, 17361, 17362, 17366, 17367, 17370, 17372, 17373, 17374, 17377, 17380, 17382, 17383, 17385, 17386, 17391, 17392, 17393, 17394, 17395, 17396, 17397, 17398, 17400, 17401, 17402, 17407, 17408, 17409, 17410, 17411, 17412, 17413, 17414, 17415, 17417, 17418, 17419, 17420, 17422, 17423, 17424, 17425, 17426, 17427, 17428, 17430, 17431, 17432, 17442, 17443, 17444, 17447, 17448, 17449, 17450, 17451'
UPDATE s SET fld5 = 2
FROM Snha s
JOIN dbo.udf_SplitList(@id, ',') split
ON split.value = s.na
WHERE fld5 = 3
Now, how do I send an email for every updated row, but as a "personal email" to that employee?
Code Snippet
DECLARE @xml NVARCHAR(MAX)DECLARE @body NVARCHAR(MAX)
SET @xml =CAST(( SELECT
FirstName AS 'td','',
LastName AS 'td','' ,
SET @body = @body + @xml +'</table></body></html>'
EXEC msdb.dbo.sp_send_dbmail
@recipients =''
@copy_recipients='www@iec.com',
@body = @body,
@body_format ='HTML',
@subject ='test',
@profile_name ='bob'
END
ELSE
print 'no email today'
TNX
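A hedged sketch of looping over the rows just updated and sending one Database Mail message per employee; the Email and FirstName columns on Snha are assumptions, and the profile name is taken from the snippet above:
Code:
DECLARE @empEmail nvarchar(256), @empName nvarchar(100), @mailBody nvarchar(MAX)

DECLARE email_cur CURSOR LOCAL FAST_FORWARD FOR
    SELECT s.Email, s.FirstName            -- assumed columns on Snha
    FROM Snha s
    JOIN dbo.udf_SplitList(@id, ',') split ON split.value = s.na
    WHERE s.fld5 = 2                       -- the rows the UPDATE above just set

OPEN email_cur
FETCH NEXT FROM email_cur INTO @empEmail, @empName
WHILE @@FETCH_STATUS = 0
BEGIN
    SET @mailBody = N'Hello ' + @empName + N', your record was updated today.'

    EXEC msdb.dbo.sp_send_dbmail
        @profile_name = 'bob',
        @recipients   = @empEmail,
        @subject      = 'test',
        @body         = @mailBody

    FETCH NEXT FROM email_cur INTO @empEmail, @empName
END
CLOSE email_cur
DEALLOCATE email_cur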
View 2 Replies
View Related