Monday, September 2, 2013

CMD commands to find & kill the process holding a given port number

Use cmd and type as :
1. C:\>netstat -ano
     It will display the Active Connections with columns (Proto|Local Address|Foreign Address|State|PID)
     

2. To find a specific active process:
     C:\>netstat -o -n -a | findstr 127.0.0.1:2020
           TCP    127.0.0.1:2020         0.0.0.0:0              LISTENING       1276
           
           OR
           
     C:\>netstat -ano | find "2020"
           TCP    127.0.0.1:2020         0.0.0.0:0              LISTENING       1276
           

3. To kill the process in Windows OS, use the process id (PID) in the command below:
     C:\>taskkill /F /PID 7068
     SUCCESS: The process with PID 7068 has been terminated.


4. If we try to kill the process which does not exist in the active connection list, then
     C:\>taskkill /F /PID 7068
     ERROR: The process "7068" not found.

Friday, July 26, 2013

Convert XLS to CSV to String

package com;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Iterator;
import java.util.Scanner;

import org.apache.poi.hssf.usermodel.HSSFSheet;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.Row;

import au.com.bytecode.opencsv.CSVWriter;

public class Xls2Csv2TxtConvertOne {

 public static void getXls2Csv(final String fileInput,
   final String fileOutput) {
  FileInputStream input_document = null;
  try {
   input_document = new FileInputStream(new File(fileInput));
  } catch (FileNotFoundException e) {
   e.printStackTrace();
  }
  HSSFWorkbook my_xls_workbook = null;
  try {
   my_xls_workbook = new HSSFWorkbook(input_document);
  } catch (IOException e) {
   e.printStackTrace();
  }
  HSSFSheet my_worksheet = my_xls_workbook.getSheetAt(0);
  Iterator<Row> rowIterator = my_worksheet.iterator();
  FileWriter my_csv = null;
  try {
   my_csv = new FileWriter(fileOutput);
  } catch (IOException e) {
   e.printStackTrace();
  }
  CSVWriter my_csv_output = new CSVWriter(my_csv);
  while (rowIterator.hasNext()) {
   Row row = rowIterator.next();
   int i = 0;
   String[] csvdata = new String[6];
   Iterator<Cell> cellIterator = row.cellIterator();
   while (cellIterator.hasNext()) {
    Cell cell = cellIterator.next();
    switch (cell.getCellType()) {
    case Cell.CELL_TYPE_STRING:
     csvdata[i] = cell.getStringCellValue();
     break;
    case Cell.CELL_TYPE_NUMERIC:
     csvdata[i] = String.valueOf((int) cell
       .getNumericCellValue());
     break;
    case Cell.CELL_TYPE_BLANK:
     csvdata[i] = "";
     break;
    }
    i = i + 1;
   }
   my_csv_output.writeNext(csvdata);
  }
  try {
   input_document.close();
   my_csv_output.close();
  } catch (IOException e) {
   e.printStackTrace();
  }
 }

 public static void getCsv2Txt(final String csvFileName) {
  Scanner scanner = null;
  try {
   scanner = new Scanner(new File(csvFileName));
  } catch (FileNotFoundException e) {
   e.printStackTrace();
  }
  scanner.useDelimiter(",");
  while (scanner.hasNext()) {
   System.out.print(scanner.next() + "|");
  }
  scanner.close();
 }

 /**
  * @param args
  */
 public static void main(String[] args) {

  String fileInput = "D:\\DEV\\Notes\\GeoIPCountryWhois30.xls";
  String fileOutput = "D:\\DEV\\Notes\\GeoIPCountryWhois30.csv";
  
  Xls2Csv2TxtConvertOne.getXls2Csv(fileInput,fileOutput);
  Xls2Csv2TxtConvertOne.getCsv2Txt(fileOutput);
 }

} 
 

 
output:
-------
"1.0.0.0"|"1.0.0.255"|"16777216"|"16777471"|"AU"|"Australia"
"1.0.1.0"|"1.0.3.255"|"16777472"|"16778239"|"CN"|"China"
"1.0.4.0"|"1.0.7.255"|"16778240"|"16779263"|"AU"|"Australia"
"1.0.8.0"|"1.0.15.255"|"16779264"|"16781311"|"CN"|"China"
"1.0.16.0"|"1.0.31.255"|"16781312"|"16785407"|"JP"|"Japan"
"1.0.32.0"|"1.0.63.255"|"16785408"|"16793599"|"CN"|"China"
"1.0.64.0"|"1.0.127.255"|"16793600"|"16809983"|"JP"|"Japan"
"1.0.128.0"|"1.0.255.255"|"16809984"|"16842751"|"TH"|"Thailand"
"1.1.0.0"|"1.1.0.255"|"16842752"|"16843007"|"CN"|"China"
"1.1.1.0"|"1.1.1.255"|"16843008"|"16843263"|"AU"|"Australia"
"1.1.2.0"|"1.1.63.255"|"16843264"|"16859135"|"CN"|"China"
"1.1.64.0"|"1.1.127.255"|"16859136"|"16875519"|"JP"|"Japan"
"1.1.128.0"|"1.1.255.255"|"16875520"|"16908287"|"TH"|"Thailand"
"1.2.0.0"|"1.2.2.255"|"16908288"|"16909055"|"CN"|"China"
"1.2.3.0"|"1.2.3.255"|"16909056"|"16909311"|"AU"|"Australia"
"1.2.4.0"|"1.2.127.255"|"16909312"|"16941055"|"CN"|"China"
"1.2.128.0"|"1.2.255.255"|"16941056"|"16973823"|"TH"|"Thailand"
"1.3.0.0"|"1.3.255.255"|"16973824"|"17039359"|"CN"|"China"
"1.4.0.0"|"1.4.0.255"|"17039360"|"17039615"|"AU"|"Australia"
"1.4.1.0"|"1.4.127.255"|"17039616"|"17072127"|"CN"|"China"
"1.4.128.0"|"1.4.255.255"|"17072128"|"17104895"|"TH"|"Thailand"
"1.5.0.0"|"1.5.255.255"|"17104896"|"17170431"|"JP"|"Japan"
"1.6.0.0"|"1.7.255.255"|"17170432"|"17301503"|"IN"|"India"
"1.8.0.0"|"1.8.255.255"|"17301504"|"17367039"|"CN"|"China"
"1.9.0.0"|"1.9.255.255"|"17367040"|"17432575"|"MY"|"Malaysia"
"1.10.0.0"|"1.10.9.255"|"17432576"|"17435135"|"CN"|"China"
"1.10.10.0"|"1.10.10.255"|"17435136"|"17435391"|"AU"|"Australia"
"1.10.11.0"|"1.10.127.255"|"17435392"|"17465343"|"CN"|"China"
"1.10.128.0"|"1.10.255.255"|"17465344"|"17498111"|"TH"|"Thailand"
"1.11.0.0"|"1.11.255.255"|"17498112"|"17563647"|"KR"|"Korea| Republic of"
 

Thursday, June 13, 2013

Read CSV file and parse

 
 
package com;

import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;

import org.apache.commons.csv.CSVParser;

import au.com.bytecode.opencsv.CSVReader;

public class ReadCSVFileAndSplit {

 /**
  * Demonstrates three ways of reading and splitting a CSV file:
  * plain {@code BufferedReader} + {@code String.split}, opencsv's
  * {@code CSVReader}, and the Solr commons-csv {@code CSVParser}.
  *
  * @param args unused
  */
 public static void main(String[] args) {

  // 1) Plain JDK: split each line on commas.
  // The original printed AFTER the read loop, so for a multi-line file it
  // showed only the last line's tokens, and it threw an NPE on an empty
  // file (values stayed null). Printing inside the loop fixes both.
  try (BufferedReader br = new BufferedReader(new FileReader("C:\\testing.csv"))) {
   String currentLine;
   while ((currentLine = br.readLine()) != null) {
    for (String value : currentLine.split(",")) {
     System.out.println(value);
    }
   }
  } catch (IOException e) {
   e.printStackTrace();
  }

  System.out.println("************Using CSVReader from opencsv API************");
  // 2) opencsv: readNext() returns one parsed record, or null at EOF.
  CSVReader reader = null;
  try {
   reader = new CSVReader(new FileReader("C:\\testing.csv"));
   String[] entries = reader.readNext();
   if (entries != null) {
    for (String value : entries) {
     System.out.println(value);
    }
   }
  } catch (IOException e) {
   e.printStackTrace();
  } finally {
   // The original never closed the reader.
   if (reader != null) {
    try {
     reader.close();
    } catch (IOException ignored) {
     // best-effort close of a demo resource
    }
   }
  }

  System.out.println("************Using CSVParser from apache common csv API************");
  // 3) Solr commons-csv: getLine() returns one parsed record, or null at EOF.
  try {
   CSVParser csvParser = new CSVParser(new FileReader("C:\\testing.csv"));
   String[] entries = csvParser.getLine();
   if (entries != null) {
    for (String value : entries) {
     System.out.println(value);
    }
   }
  } catch (IOException e) {
   // FileNotFoundException is an IOException; one catch covers both.
   e.printStackTrace();
  }

 }

}
pom.xml
 
<project>
............
 <build>
    <sourceDirectory>src</sourceDirectory>
    <plugins>
      <plugin>
        <artifactId>maven-compiler-plugin</artifactId>
        <version>3.0</version>
        <configuration>
          <source>1.7</source>
          <target>1.7</target>
        </configuration>
      </plugin>
    </plugins>
   
  </build>
  <dependencies>
  <dependency>
 <groupId>org.apache.solr</groupId>
 <artifactId>solr-commons-csv</artifactId>
 <version>3.5.0</version>
</dependency>
<dependency>
 <groupId>net.sf.opencsv</groupId>
 <artifactId>opencsv</artifactId>
 <version>2.3</version>
</dependency>
  </dependencies>
</project> 


output:

test
me
for
free
************Using CSVReader from opencsv API************
test
me
for
free
************Using CSVParser from apache common csv API************
test
me
for
free 
 
 
 
Note : 
 
I have used both : solr-commons-csv-3.5.0.jar & opencsv-2.3.jar

 

Tuesday, June 11, 2013

Time difference using Joda Time Utility jar

/*
 * Thanooj - using Joda Time Utility jar 
 */
package com;

import java.text.SimpleDateFormat;
import java.util.Date;

import org.joda.time.DateTime;
import org.joda.time.Days;
import org.joda.time.Hours;
import org.joda.time.Minutes;
import org.joda.time.Seconds;

public class JodaDemo {

 /**
  * Prints the elapsed time between two hard-coded timestamps as
  * days / hours / minutes / seconds, using Joda-Time's interval helpers.
  *
  * @param args unused
  */
 public static void main(String[] args) {

  final String start = "01/11/2012 08:28:36";
  final String stop = "01/12/2012 10:31:40";

  final SimpleDateFormat parser = new SimpleDateFormat("MM/dd/yyyy HH:mm:ss");

  try {
   final DateTime from = new DateTime(parser.parse(start));
   final DateTime to = new DateTime(parser.parse(stop));

   // Each *Between() call spans the WHOLE interval, so the smaller units
   // are reduced modulo their parent unit before printing.
   final int days = Days.daysBetween(from, to).getDays();
   final int hours = Hours.hoursBetween(from, to).getHours() % 24;
   final int minutes = Minutes.minutesBetween(from, to).getMinutes() % 60;
   final int seconds = Seconds.secondsBetween(from, to).getSeconds() % 60;

   System.out.println("Time it took to execute this method ::\n");
   System.out.print(days + " days, ");
   System.out.print(hours + " hours, ");
   System.out.print(minutes + " minutes and ");
   System.out.print(seconds + " seconds.");

  } catch (Exception e) {
   e.printStackTrace();
  }
 }

}


output:
-------
Time it took to execute this method ::

1 days, 2 hours, 3 minutes and 4 seconds.

Thursday, May 23, 2013

Interrupt a Quartz's job using InterruptableJob

/* 
 * DemoInterruptableJob.java
 */

package com;

import java.util.Calendar;

import org.quartz.InterruptableJob;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.quartz.UnableToInterruptJobException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * 
 * An implementation of an InterruptableJob
 * 
* * @author thanooj kalathuru */
// NOTE(review): the remainder of this post was pasted with its original line
// breaks lost — two classes (DemoInterruptableJob, which busy-loops and throws
// InterruptedException when its interrupt() flag is set, and InterruptExample,
// which schedules it with a CronTrigger and calls scheduler.interrupt()) plus
// the console log output are collapsed onto a few very long lines. As written,
// the embedded "//" comments would comment out the code that follows them on
// the same line, so this text does not compile as-is; it is kept byte-for-byte
// below rather than risk a lossy reconstruction.
public class DemoInterruptableJob implements InterruptableJob { // logging services private static Logger _log = LoggerFactory.getLogger(DemoInterruptableJob.class); // job name private String _jobName = ""; private boolean isInterrupted = false; /** * * Empty constructor for job initialization *
*/
public DemoInterruptableJob() { } /** * * Called by the {@link org.quartz.Scheduler} when a * {@link org.quartz.Trigger} fires that is associated with the * Job. *
* * @throws JobExecutionException * if there is an exception while executing the job. */
public void execute(JobExecutionContext context) throws JobExecutionException { int i = 0; try { _jobName = context.getJobDetail().getFullName(); _log.info("---- " + _jobName + " executing at " + Calendar.getInstance().getTime()); while (i < 1000000000L) { if(isInterrupted){ _log.info("- inside isInterrupted -"); throw new InterruptedException(); }else{ if(i%100000000L == 0) _log.info("---- i:: " + i); i++; } } _log.info("---- " + _jobName + " completing at " + Calendar.getInstance().getTime()); }catch (InterruptedException e) { _log.info("- isInterrupted at --- i:: " + i); return; } } @Override public void interrupt() throws UnableToInterruptJobException { _log.info("--INTERUPTING-- "); isInterrupted = true; } } *********************************** /* * InterruptExample.java * */ package com; import java.util.Iterator; import java.util.List; import org.quartz.CronTrigger; import org.quartz.JobDetail; import org.quartz.JobExecutionContext; import org.quartz.Scheduler; import org.quartz.SchedulerFactory; import org.quartz.SchedulerMetaData; import org.quartz.impl.StdSchedulerFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class InterruptExample { private static Logger log = LoggerFactory.getLogger(InterruptExample.class); public void run() throws Exception { System.setProperty("org.quartz.scheduler.skipUpdateCheck", "true"); SchedulerFactory sf = new StdSchedulerFactory(); Scheduler scheduler = sf.getScheduler(); JobDetail job = new JobDetail("interruptableJob1", "group1", DemoInterruptableJob.class); CronTrigger trigger = new CronTrigger(); trigger.setName("dummyTriggerName"); trigger.setCronExpression("0/7 * * * * ?"); scheduler.scheduleJob(job, trigger); scheduler.start(); try { @SuppressWarnings("unchecked") List<JobExecutionContext> jobsList = (List<JobExecutionContext>) scheduler .getCurrentlyExecutingJobs(); Iterator<JobExecutionContext> jobsIterator = jobsList .listIterator(); log.info("----------jobsList size: " + jobsList.size()); 
while (jobsIterator.hasNext()) { JobExecutionContext context = (JobExecutionContext) jobsIterator .next(); log.info("----------" + context.getJobDetail().getFullName()); } Thread.sleep(3000L); log.info("----****--- Started interrupt Scheduler -----------------"); scheduler.interrupt(job.getName(), job.getGroup()); log.info("----****--- ended interrupt Scheduler -----------------"); } catch (Exception e) { e.printStackTrace(); } SchedulerMetaData metaData = scheduler.getMetaData(); log.info("Executed " + metaData.getNumberOfJobsExecuted() + " jobs."); } public static void main(String[] args) throws Exception { InterruptExample example = new InterruptExample(); example.run(); } } ****************************************** 2013-05-23 17:05:06 INFO SimpleThreadPool:270 - Job execution threads will use class loader of thread: main 2013-05-23 17:05:06 INFO SchedulerSignalerImpl:60 - Initialized Scheduler Signaller of type: class org.quartz.core.SchedulerSignalerImpl 2013-05-23 17:05:06 INFO QuartzScheduler:222 - Quartz Scheduler v.1.8.5 created. 2013-05-23 17:05:06 INFO RAMJobStore:139 - RAMJobStore initialized. 2013-05-23 17:05:06 INFO QuartzScheduler:244 - Scheduler meta-data: Quartz Scheduler (v1.8.5) 'DefaultQuartzScheduler' with instanceId 'NON_CLUSTERED' Scheduler class: 'org.quartz.core.QuartzScheduler' - running locally. NOT STARTED. Currently in standby mode. Number of jobs executed: 0 Using thread pool 'org.quartz.simpl.SimpleThreadPool' - with 10 threads. Using job-store 'org.quartz.simpl.RAMJobStore' - which does not support persistence. and is not clustered. 2013-05-23 17:05:06 INFO StdSchedulerFactory:1280 - Quartz scheduler 'DefaultQuartzScheduler' initialized from default resource file in Quartz package: 'quartz.properties' 2013-05-23 17:05:06 INFO StdSchedulerFactory:1284 - Quartz scheduler version: 1.8.5 2013-05-23 17:05:06 INFO QuartzScheduler:500 - Scheduler DefaultQuartzScheduler_$_NON_CLUSTERED started. 
// Console log output from the run (continued):
2013-05-23 17:05:06 INFO InterruptExample:42 - ----------jobsList size: 0 2013-05-23 17:05:07 DEBUG SimpleJobFactory:50 - Producing instance of Job 'group1.interruptableJob1', class=com.DemoInterruptableJob 2013-05-23 17:05:07 DEBUG JobRunShell:215 - Calling execute on job group1.interruptableJob1 2013-05-23 17:05:07 INFO DemoInterruptableJob:56 - ---- group1.interruptableJob1 executing at Thu May 23 17:05:07 IST 2013 2013-05-23 17:05:07 INFO DemoInterruptableJob:64 - ---- i:: 0 2013-05-23 17:05:07 INFO DemoInterruptableJob:64 - ---- i:: 100000000 2013-05-23 17:05:07 INFO DemoInterruptableJob:64 - ---- i:: 200000000 2013-05-23 17:05:07 INFO DemoInterruptableJob:64 - ---- i:: 300000000 2013-05-23 17:05:08 INFO DemoInterruptableJob:64 - ---- i:: 400000000 2013-05-23 17:05:08 INFO DemoInterruptableJob:64 - ---- i:: 500000000 2013-05-23 17:05:08 INFO DemoInterruptableJob:64 - ---- i:: 600000000 2013-05-23 17:05:08 INFO DemoInterruptableJob:64 - ---- i:: 700000000 2013-05-23 17:05:09 INFO InterruptExample:49 - ----****--- Started interrupt Scheduler ----------------- 2013-05-23 17:05:09 INFO DemoInterruptableJob:77 - --INTERUPTING-- 2013-05-23 17:05:09 INFO InterruptExample:51 - ----****--- ended interrupt Scheduler ----------------- 2013-05-23 17:05:09 INFO DemoInterruptableJob:60 - - inside isInterrupted - 2013-05-23 17:05:09 INFO InterruptExample:57 - Executed 1 jobs. 
2013-05-23 17:05:09 INFO DemoInterruptableJob:70 - - isInterrupted at --- i:: 781135745 2013-05-23 17:05:14 DEBUG SimpleJobFactory:50 - Producing instance of Job 'group1.interruptableJob1', class=com.DemoInterruptableJob 2013-05-23 17:05:14 DEBUG JobRunShell:215 - Calling execute on job group1.interruptableJob1 2013-05-23 17:05:14 INFO DemoInterruptableJob:56 - ---- group1.interruptableJob1 executing at Thu May 23 17:05:14 IST 2013 2013-05-23 17:05:14 INFO DemoInterruptableJob:64 - ---- i:: 0 2013-05-23 17:05:14 INFO DemoInterruptableJob:64 - ---- i:: 100000000 2013-05-23 17:05:14 INFO DemoInterruptableJob:64 - ---- i:: 200000000 2013-05-23 17:05:14 INFO DemoInterruptableJob:64 - ---- i:: 300000000 2013-05-23 17:05:15 INFO DemoInterruptableJob:64 - ---- i:: 400000000 2013-05-23 17:05:15 INFO DemoInterruptableJob:64 - ---- i:: 500000000 2013-05-23 17:05:15 INFO DemoInterruptableJob:64 - ---- i:: 600000000 2013-05-23 17:05:16 INFO DemoInterruptableJob:64 - ---- i:: 700000000 2013-05-23 17:05:16 INFO DemoInterruptableJob:64 - ---- i:: 800000000 2013-05-23 17:05:16 INFO DemoInterruptableJob:64 - ---- i:: 900000000 2013-05-23 17:05:16 INFO DemoInterruptableJob:68 - ---- group1.interruptableJob1 completing at Thu May 23 17:05:16 IST 2013